From d3e0ec157dc0d12d529057ec86170a9e7a5b6b7d Mon Sep 17 00:00:00 2001
From: Tommaso Teofili
Date: Sat, 13 Apr 2024 18:57:26 +0200
Subject: [PATCH 001/130] Add release highlight for #106068 (#107418)

* Added highlight for #106068
---
 docs/changelog/106068.yaml | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/docs/changelog/106068.yaml b/docs/changelog/106068.yaml
index fbc30aa86a33e..51bcc2bcf98b0 100644
--- a/docs/changelog/106068.yaml
+++ b/docs/changelog/106068.yaml
@@ -3,3 +3,19 @@ summary: Add `modelId` and `modelText` to `KnnVectorQueryBuilder`
 area: Search
 type: enhancement
 issues: []
+highlight:
+  title: Query phase KNN now supports query_vector_builder
+  body: |-
+    It is now possible to pass `model_text` and `model_id` within a `knn` query
+    in the [query DSL](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-knn-query.html) to convert a text query into a dense vector and run the
+    nearest neighbor query on it, instead of requiring the dense vector to be
+    directly passed (within the `query_vector` parameter). Similar to the
+    [top-level knn query](https://www.elastic.co/guide/en/elasticsearch/reference/current/knn-search.html) (executed in the DFS phase), it is possible to supply
+    a `query_vector_builder` object containing a `text_embedding` object with
+    `model_text` (the text query to be converted into a dense vector) and
+    `model_id` (the identifier of a deployed model responsible for transforming
+    the text query into a dense vector). Note that an embedding model with the
+    referenced `model_id` needs to be [deployed on an ML node](https://www.elastic.co/guide/en/machine-learning/current/ml-nlp-deploy-models.html)
+    in the cluster.
+  notable: true
+

From b9322da325e6147f5b6314bea2ab3859b07f3695 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?=
Date: Mon, 15 Apr 2024 10:31:09 +0200
Subject: [PATCH 002/130] [DOCS] Fixes section levels in POST inference API
 docs. (#107451)

---
 docs/reference/inference/post-inference.asciidoc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc
index 8c6a219f8e247..023566d3e40ee 100644
--- a/docs/reference/inference/post-inference.asciidoc
+++ b/docs/reference/inference/post-inference.asciidoc
@@ -54,8 +54,10 @@ The unique identifier of the {infer} endpoint.
 (Optional, string)
 The type of {infer} task that the model performs.
 
+
+[discrete]
 [[post-inference-api-query-params]]
-== {api-query-parms-title}
+==== {api-query-parms-title}
 
 `timeout`::
 (Optional, timeout)
@@ -64,7 +66,7 @@ seconds.
 
 [discrete]
 [[post-inference-api-request-body]]
-== {api-request-body-title}
+==== {api-request-body-title}
 
 `input`::
 (Required, array of strings)

From 4dfcb0897eba48fb81ff1df2cfa106ecebf2d89c Mon Sep 17 00:00:00 2001
From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com>
Date: Mon, 15 Apr 2024 11:02:18 +0200
Subject: [PATCH 003/130] Fetch meta fields in FetchFieldsPhase using
 ValueFetcher (#106325)

Here we extract the logic to populate metadata fields such as _ignored,
_routing, _size and the deprecated _type into FetchFieldsPhase so that we
can use the ValueFetcher interface to retrieve field values. This allows
us to fetch values no matter if the Mapper uses stored or doc values.
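
As a purely illustrative example (mirroring the new 520_fetch_fields.yml
REST tests below; the index and field names are simply those used by the
tests), a metadata field such as `_ignored` can now be requested through
`fields` as well as through `stored_fields`, with both paths served by
FetchFieldsPhase via the field's ValueFetcher:

    POST test/_search
    {
      "fields": [ "_ignored" ]
    }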
--- docs/reference/search/profile.asciidoc | 22 +++ .../rest-api-spec/test/30_inner_hits.yml | 17 +- .../test/get/120_stored_fields_ignored.yml | 159 ++++++++++++++++++ .../rest-api-spec/test/search/370_profile.yml | 54 +++--- .../test/search/520_fetch_fields.yml | 159 ++++++++++++++++++ .../fetch/subphase/FetchFieldsPhase.java | 77 +++++++-- .../fetch/subphase/StoredFieldsPhase.java | 30 +--- 7 files changed, 453 insertions(+), 65 deletions(-) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/120_stored_fields_ignored.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 5b63929934770..3fed14231808c 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -197,6 +197,17 @@ The API returns the following result: "stored_fields": ["_id", "_routing", "_source"] }, "children": [ + { + "type" : "FetchFieldsPhase", + "description" : "", + "time_in_nanos" : 238762, + "breakdown" : { + "process_count" : 5, + "process" : 227914, + "next_reader" : 10848, + "next_reader_count" : 1 + } + }, { "type": "FetchSourcePhase", "description": "", @@ -1043,6 +1054,17 @@ And here is the fetch profile: "stored_fields": ["_id", "_routing", "_source"] }, "children": [ + { + "type" : "FetchFieldsPhase", + "description" : "", + "time_in_nanos" : 238762, + "breakdown" : { + "process_count" : 5, + "process" : 227914, + "next_reader" : 10848, + "next_reader_count" : 1 + } + }, { "type": "FetchSourcePhase", "description": "", diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index 6395d3e0f8db4..a561ebbae00e9 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -120,8 +120,8 @@ teardown: --- profile fetch: - skip: - version: ' - 7.15.99' - reason: fetch profiling implemented in 7.16.0 + version: ' - 8.13.99' + reason: fetch fields and stored_fields using ValueFetcher - do: search: @@ -141,15 +141,20 @@ profile fetch: - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } - - length: { profile.shards.0.fetch.children: 3 } - - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } + - length: { profile.shards.0.fetch.children: 4 } + - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - - match: { profile.shards.0.fetch.children.1.type: InnerHitsPhase } + - match: { profile.shards.0.fetch.children.1.type: FetchSourcePhase } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } - - match: { profile.shards.0.fetch.children.2.type: 
StoredFieldsPhase } + - match: { profile.shards.0.fetch.children.2.type: InnerHitsPhase } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader: 0 } + - match: { profile.shards.0.fetch.children.3.type: StoredFieldsPhase } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/120_stored_fields_ignored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/120_stored_fields_ignored.yml new file mode 100644 index 0000000000000..c442c2a3e96ab --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/120_stored_fields_ignored.yml @@ -0,0 +1,159 @@ +--- +"_ignored field through get api using stored_fields": + - do: + indices.create: + index: test + body: + mappings: + properties: + keyword: + type: keyword + ignore_above: 5 + ip: + type: ip + ignore_malformed: true + value: + type: long + ignore_malformed: true + + - do: + index: + index: test + id: 1 + refresh: true + body: + keyword: foo + ip: 192.168.0.1 + value: 23 + - do: + index: + index: test + id: 2 + refresh: true + body: + keyword: foobar + ip: garbage + value: missing + - do: + index: + index: test + id: 3 + refresh: true + body: + keyword: + - foo + - bar + - foobar + ip: + - 10.10.1.1 + - 192.8.1.2 + - 199.199.300.999 + value: + - 1 + - 2 + - ops + + - do: + get: + index: test + id: 1 + + - match: {_index: "test"} + - match: {_id: "1"} + - match: {_version: 1} + - match: {found: true} + - match: + _source: + keyword: foo + ip: 192.168.0.1 + value: 23 + + - is_false: fields + + - do: + get: + index: test + id: 2 + - match: { _index: "test" } + - match: { _id: "2" } + - match: { _version: 1 } + - match: { found: true } + - match: + _source: + ip: garbage + keyword: foobar + value: missing + + - is_false: fields + + - do: + get: + index: test + id: 3 + - match: { _index: "test" } + - match: { _id: "3" } + - match: { _version: 1 } + - match: { found: true } + - match: + _source: + ip: + - 10.10.1.1 + - 192.8.1.2 + - 199.199.300.999 + keyword: + - foo + - bar + - foobar + value: + - 1 + - 2 + - ops + + - is_false: fields + + - do: + get: + index: test + id: 1 + stored_fields: + - _ignored + + - match: { _index: "test" } + - match: { _id: "1" } + - match: { _version: 1 } + - match: { found: true } + - match: { _ignored: null} + + - do: + get: + index: test + id: 2 + stored_fields: + - _ignored + + - match: { _index: "test" } + - match: { _id: "2" } + - match: { _version: 1 } + - match: { found: true } + - match: + _ignored: + - ip + - keyword + - value + + - do: + get: + index: test + id: 3 + stored_fields: + - _ignored + + - match: { _index: "test" } + - match: { _id: "3" } + - match: { _version: 1 } + - match: { found: true } + - match: + _ignored: + - ip + - keyword + - value diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 201bba70ca5a5..200f7292291b1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -22,8 +22,8 @@ setup: --- fetch fields: - skip: - version: ' - 8.5.99' - reason: stored fields phase added in 8.6 + version: ' - 8.13.99' + reason: fetch fields and 
stored_fields using ValueFetcher - do: search: @@ -44,17 +44,21 @@ fetch fields: - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } - length: { profile.shards.0.fetch.children: 2 } - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } - - match: { profile.shards.0.fetch.children.1.type: StoredFieldsPhase } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - match: { profile.shards.0.fetch.children.1.type: StoredFieldsPhase } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } --- fetch source: - skip: - version: ' - 8.5.99' - reason: stored fields phase added in 8.6 + version: ' - 8.13.99' + reason: fetch fields and stored_fields using ValueFetcher - do: search: @@ -71,20 +75,21 @@ fetch source: - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } - - length: { profile.shards.0.fetch.children: 2 } - - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - - match: { profile.shards.0.fetch.children.0.debug.fast_path: 1 } - - match: { profile.shards.0.fetch.children.1.type: StoredFieldsPhase } + - length: { profile.shards.0.fetch.children: 3 } + - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } + - match: { profile.shards.0.fetch.children.1.type: FetchSourcePhase } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } + - match: { profile.shards.0.fetch.children.1.debug.fast_path: 1 } + - match: { profile.shards.0.fetch.children.2.type: StoredFieldsPhase } --- fetch nested source: - skip: - version: ' - 8.5.99' - reason: stored fields phase added in 8.6 + version: ' - 8.13.99' + reason: fetch fields and stored_fields using ValueFetcher - do: indices.create: @@ -135,24 +140,25 @@ fetch nested source: - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } - - length: { profile.shards.0.fetch.children: 3 } - - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } - - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } - - match: { 
profile.shards.0.fetch.children.1.type: InnerHitsPhase } + - length: { profile.shards.0.fetch.children: 4 } + - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } + - match: { profile.shards.0.fetch.children.1.type: FetchSourcePhase } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } - - match: { profile.shards.0.fetch.children.2.type: StoredFieldsPhase } + - match: { profile.shards.0.fetch.children.2.type: InnerHitsPhase } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.2.breakdown.next_reader: 0 } + - match: { profile.shards.0.fetch.children.3.type: StoredFieldsPhase } --- disabling stored fields removes fetch sub phases: - skip: version: ' - 7.15.99' - reason: fetch profiling implemented in 7.16.0 + reason: fetch profiling implemented in 7.16.0 - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml new file mode 100644 index 0000000000000..ad74cd2ccd795 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml @@ -0,0 +1,159 @@ +--- +setup: + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + stored_keyword: + type: keyword + store: true + keyword: + type: keyword + stored_value: + type: integer + store: true + value: + type: integer + ignored_keyword: + type: keyword + ignore_above: 3 + ignored_value: + type: integer + ignore_malformed: true + + - do: + index: + index: test + id: "1" + refresh: true + body: + stored_keyword: "stored_keyword_value" + keyword: "keyword_value" + stored_value: 10 + value: 100 + ignored_keyword: "foobar" + ignored_value: foobar + +--- +fetch stored fields: + + - do: + search: + index: test + body: + stored_fields: [ stored_keyword, stored_value, keyword, value ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields.stored_keyword.0: "stored_keyword_value" } + - match: { hits.hits.0.fields.stored_value.0: 10 } + - match: { hits.hits.0.fields.keyword: null } + - match: { hits.hits.0.fields.value: null } + +--- +fetch fields: + + - do: + search: + index: test + body: + fields: [ stored_keyword, stored_value, keyword, value ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields.stored_keyword.0: "stored_keyword_value" } + - match: { hits.hits.0.fields.stored_value.0: 10 } + - match: { hits.hits.0.fields.keyword.0: "keyword_value" } + - match: { hits.hits.0.fields.value.0: 100 } + +--- +fetch fields and stored fields: + + - do: + search: + index: test + body: + fields: [ keyword, stored_value ] + stored_fields: [ stored_keyword, value ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields.stored_keyword.0: "stored_keyword_value" } + - match: { hits.hits.0.fields.stored_value.0: 10 } + - match: { hits.hits.0.fields.keyword.0: "keyword_value" } + - match: { hits.hits.0.fields.value: null } + +--- +fetch _ignored via stored_fields: + + - do: + search: + index: test + body: + stored_fields: [ 
_ignored ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields._ignored: null } + - match: { hits.hits.0._ignored.0: "ignored_keyword" } + - match: { hits.hits.0._ignored.1: "ignored_value" } + +--- +fetch _ignored via fields: + + - do: + search: + index: test + body: + fields: [ _ignored ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields._ignored.0: "ignored_keyword" } + - match: { hits.hits.0.fields._ignored.1: "ignored_value" } + - match: { hits.hits.0._ignored.0: "ignored_keyword" } + - match: { hits.hits.0._ignored.1: "ignored_value" } + +--- +fetch _seq_no via stored_fields: + + - do: + search: + index: test + body: + stored_fields: [ _seq_no ] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields._seq_no: null } + - match: { hits.hits.0._seq_no: null } + +--- +fetch _seq_no via fields: + + - do: + catch: "request" + search: + index: test + body: + fields: [ _seq_no ] + + # This should be `unauthorized` (401) or `forbidden` (403) or at least `bad request` (400) + # while instead it is mapped to an `internal_server_error (500)` + - match: { status: 500 } + - match: { error.root_cause.0.type: unsupported_operation_exception } + +--- +fetch fields with none stored_fields: + - skip: + version: " - 7.99.99" + reason: "from illegal_argument_exception to action_request_validation_exception" + + - do: + catch: "bad_request" + search: + index: test + body: + stored_fields: _none_ + fields: [stored_keyword, keyword, stored_value, value, ignored_keyword, ignored_value, _ignored] + + - match: { status: 400 } + - match: { error.root_cause.0.type: action_request_validation_exception } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index b46f2752642a5..882eb1cf9c75b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -10,14 +10,25 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.search.SearchHit; +import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; +import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.fetch.StoredFieldsSpec; import java.io.IOException; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; import java.util.Map; +import java.util.Set; /** * A fetch sub-phase for high-level field retrieval. Given a list of fields, it @@ -25,33 +36,77 @@ * and returns them as document fields. 
*/ public final class FetchFieldsPhase implements FetchSubPhase { + + private static final List DEFAULT_METADATA_FIELDS = List.of( + new FieldAndFormat(IgnoredFieldMapper.NAME, null), + new FieldAndFormat(RoutingFieldMapper.NAME, null), + new FieldAndFormat(LegacyTypeFieldMapper.NAME, null) + ); + @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { - FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext(); - if (fetchFieldsContext == null) { + final FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext(); + final StoredFieldsContext storedFieldsContext = fetchContext.storedFieldsContext(); + + boolean fetchStoredFields = storedFieldsContext != null && storedFieldsContext.fetchFields(); + if (fetchFieldsContext == null && fetchStoredFields == false) { return null; } - FieldFetcher fieldFetcher = FieldFetcher.create(fetchContext.getSearchExecutionContext(), fetchFieldsContext.fields()); + final SearchExecutionContext searchExecutionContext = fetchContext.getSearchExecutionContext(); + final FieldFetcher fieldFetcher = fetchFieldsContext == null ? null + : fetchFieldsContext.fields() == null ? null + : fetchFieldsContext.fields().isEmpty() ? null + : FieldFetcher.create(searchExecutionContext, fetchFieldsContext.fields()); + final FieldFetcher metadataFieldFetcher; + if (storedFieldsContext != null + && storedFieldsContext.fieldNames() != null + && storedFieldsContext.fieldNames().isEmpty() == false) { + final Set metadataFields = new HashSet<>(DEFAULT_METADATA_FIELDS); + for (final String storedField : storedFieldsContext.fieldNames()) { + final Set matchingFieldNames = searchExecutionContext.getMatchingFieldNames(storedField); + for (final String matchingFieldName : matchingFieldNames) { + if (SourceFieldMapper.NAME.equals(matchingFieldName) || IdFieldMapper.NAME.equals(matchingFieldName)) { + continue; + } + final MappedFieldType fieldType = searchExecutionContext.getFieldType(matchingFieldName); + // NOTE: checking if the field is stored is required for backward compatibility reasons and to make + // sure we also handle here stored fields requested via `stored_fields`, which was previously a + // responsibility of StoredFieldsPhase. + if (searchExecutionContext.isMetadataField(matchingFieldName) && fieldType.isStored()) { + metadataFields.add(new FieldAndFormat(matchingFieldName, null)); + } + } + } + metadataFieldFetcher = FieldFetcher.create(searchExecutionContext, metadataFields); + } else { + metadataFieldFetcher = FieldFetcher.create(searchExecutionContext, DEFAULT_METADATA_FIELDS); + } return new FetchSubPhaseProcessor() { @Override public void setNextReader(LeafReaderContext readerContext) { - fieldFetcher.setNextReader(readerContext); + if (fieldFetcher != null) { + fieldFetcher.setNextReader(readerContext); + } + metadataFieldFetcher.setNextReader(readerContext); } @Override public StoredFieldsSpec storedFieldsSpec() { - return fieldFetcher.storedFieldsSpec(); + if (fieldFetcher != null) { + return fieldFetcher.storedFieldsSpec(); + } + return StoredFieldsSpec.NO_REQUIREMENTS; } @Override public void process(HitContext hitContext) throws IOException { - Map documentFields = fieldFetcher.fetch(hitContext.source(), hitContext.docId()); - SearchHit hit = hitContext.hit(); - for (Map.Entry entry : documentFields.entrySet()) { - hit.setDocumentField(entry.getKey(), entry.getValue()); - } + final Map fields = fieldFetcher != null + ? 
fieldFetcher.fetch(hitContext.source(), hitContext.docId()) + : Collections.emptyMap(); + final Map metadataFields = metadataFieldFetcher.fetch(hitContext.source(), hitContext.docId()); + hitContext.hit().addDocumentFields(fields, metadataFields); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java index 483285dba1fa7..ac03419b50d95 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/StoredFieldsPhase.java @@ -11,10 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.IgnoredFieldMapper; -import org.elasticsearch.index.mapper.LegacyTypeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.fetch.FetchContext; @@ -25,7 +22,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -37,7 +33,7 @@ public class StoredFieldsPhase implements FetchSubPhase { /** Associates a field name with a mapped field type and whether or not it is a metadata field */ - private record StoredField(String name, MappedFieldType ft, boolean isMetadataField) { + private record StoredField(String name, MappedFieldType ft) { /** Processes a set of stored fields using field type information */ List process(Map> loadedFields) { @@ -54,13 +50,6 @@ boolean hasValue(Map> loadedFields) { } - private static final List METADATA_FIELDS = List.of( - new StoredField("_routing", RoutingFieldMapper.FIELD_TYPE, true), - new StoredField("_ignored", IgnoredFieldMapper.FIELD_TYPE, true), - // pre-6.0 indexes can return a _type field, this will be valueless in modern indexes and ignored - new StoredField("_type", LegacyTypeFieldMapper.FIELD_TYPE, true) - ); - @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { StoredFieldsContext storedFieldsContext = fetchContext.storedFieldsContext(); @@ -69,7 +58,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { } // build the StoredFieldsSpec and a list of StoredField records to process - List storedFields = new ArrayList<>(METADATA_FIELDS); + List storedFields = new ArrayList<>(); Set fieldsToLoad = new HashSet<>(); if (storedFieldsContext.fieldNames() != null) { SearchExecutionContext sec = fetchContext.getSearchExecutionContext(); @@ -82,10 +71,10 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { continue; } MappedFieldType ft = sec.getFieldType(fieldName); - if (ft.isStored() == false) { + if (ft.isStored() == false || sec.isMetadataField(fieldName)) { continue; } - storedFields.add(new StoredField(fieldName, ft, sec.isMetadataField(ft.name()))); + storedFields.add(new StoredField(fieldName, ft)); fieldsToLoad.add(ft.name()); } } @@ -101,19 +90,12 @@ public void setNextReader(LeafReaderContext readerContext) { @Override public void process(HitContext hitContext) { Map> loadedFields = hitContext.loadedFields(); - Map docFields = new HashMap<>(); - Map metaFields = new HashMap<>(); for (StoredField 
storedField : storedFields) { if (storedField.hasValue(loadedFields)) { - DocumentField df = new DocumentField(storedField.name, storedField.process(loadedFields)); - if (storedField.isMetadataField) { - metaFields.put(storedField.name, df); - } else { - docFields.put(storedField.name, df); - } + hitContext.hit() + .setDocumentField(storedField.name, new DocumentField(storedField.name, storedField.process(loadedFields))); } } - hitContext.hit().addDocumentFields(docFields, metaFields); } @Override From faa7a3c6ef9e0b5752eff75efecab91addf30e6b Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 15 Apr 2024 10:14:20 +0100 Subject: [PATCH 004/130] Handle exceptions thrown by HTTP header validation (#107355) Today if the HTTP header validation throws an exception (rather than calling `listener.onFailure()`) then we treat this as a server-side error, record it in the logs, and close the connection abruptly without sending a response. In practice such an exception is more likely a client-side error, so with this commit we catch it and marshal it back to the client instead. Closes #107338 --- docs/changelog/107355.yaml | 6 +++ .../netty4/Netty4HttpHeaderValidator.java | 47 ++++++++++++------- .../Netty4HttpHeaderValidatorTests.java | 44 +++++++++++++++++ 3 files changed, 81 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/107355.yaml diff --git a/docs/changelog/107355.yaml b/docs/changelog/107355.yaml new file mode 100644 index 0000000000000..1d4813b877e58 --- /dev/null +++ b/docs/changelog/107355.yaml @@ -0,0 +1,6 @@ +pr: 107355 +summary: Handle exceptions thrown by HTTP header validation +area: Network +type: bug +issues: + - 107338 diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidator.java index 91471863e620f..ad322503b0d06 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidator.java @@ -107,24 +107,39 @@ private void requestStart(ChannelHandlerContext ctx) { if (httpRequest == null) { // this looks like a malformed request and will forward without validation - ctx.channel().eventLoop().submit(() -> forwardFullRequest(ctx)); + ctx.channel().eventLoop().execute(() -> forwardFullRequest(ctx)); } else { - Transports.assertDefaultThreadContext(threadContext); - // this prevents thread-context changes to propagate to the validation listener - // atm, the validation listener submits to the event loop executor, which doesn't know about the ES thread-context, - // so this is just a defensive play, in case the code inside the listener changes to not use the event loop executor - ContextPreservingActionListener contextPreservingActionListener = new ContextPreservingActionListener<>( - threadContext.wrapRestorable(threadContext.newStoredContext()), - ActionListener.wrap(aVoid -> - // Always use "Submit" to prevent reentrancy concerns if we are still on event loop - ctx.channel().eventLoop().submit(() -> forwardFullRequest(ctx)), - e -> ctx.channel().eventLoop().submit(() -> forwardRequestWithDecoderExceptionAndNoContent(ctx, e)) - ) + assert Transports.assertDefaultThreadContext(threadContext); + ActionListener.run( + // this prevents thread-context changes to propagate to the validation listener + // atm, the validation listener submits to the event loop executor, which 
doesn't know about the ES thread-context, + // so this is just a defensive play, in case the code inside the listener changes to not use the event loop executor + ActionListener.assertOnce( + new ContextPreservingActionListener( + threadContext.wrapRestorable(threadContext.newStoredContext()), + // Always explicitly dispatch back to the event loop to prevent reentrancy concerns if we are still on event loop + new ActionListener<>() { + @Override + public void onResponse(Void unused) { + assert Transports.assertDefaultThreadContext(threadContext); + ctx.channel().eventLoop().execute(() -> forwardFullRequest(ctx)); + } + + @Override + public void onFailure(Exception e) { + assert Transports.assertDefaultThreadContext(threadContext); + ctx.channel().eventLoop().execute(() -> forwardRequestWithDecoderExceptionAndNoContent(ctx, e)); + } + } + ) + ), + listener -> { + // this prevents thread-context changes to propagate beyond the validation, as netty worker threads are reused + try (ThreadContext.StoredContext ignore = threadContext.newStoredContext()) { + validator.validate(httpRequest, ctx.channel(), listener); + } + } ); - // this prevents thread-context changes to propagate beyond the validation, as netty worker threads are reused - try (ThreadContext.StoredContext ignore = threadContext.newStoredContext()) { - validator.validate(httpRequest, ctx.channel(), contextPreservingActionListener); - } } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidatorTests.java index a2c034acdcb8d..e8622f2c95c2c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidatorTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpHeaderValidatorTests.java @@ -52,6 +52,7 @@ public class Netty4HttpHeaderValidatorTests extends ESTestCase { private final AtomicReference> listener = new AtomicReference<>(); private EmbeddedChannel channel; private Netty4HttpHeaderValidator netty4HttpHeaderValidator; + private final AtomicReference validationException = new AtomicReference<>(); @Override public void setUp() throws Exception { @@ -63,8 +64,13 @@ private void reset() { channel = new EmbeddedChannel(); header.set(null); listener.set(null); + validationException.set(null); HttpValidator validator = (httpRequest, channel, validationCompleteListener) -> { header.set(httpRequest); + final var exception = validationException.get(); + if (exception != null) { + throw exception; + } listener.set(validationCompleteListener); }; netty4HttpHeaderValidator = new Netty4HttpHeaderValidator(validator, new ThreadContext(Settings.EMPTY)); @@ -253,6 +259,7 @@ public void testValidationErrorForwardsAsDecoderErrorMessage() { final DefaultHttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/uri"); final DefaultHttpContent content = new DefaultHttpContent(Unpooled.buffer(4)); + channel.writeInbound(request); channel.writeInbound(content); @@ -285,6 +292,43 @@ public void testValidationErrorForwardsAsDecoderErrorMessage() { } } + public void testValidationExceptionForwardsAsDecoderErrorMessage() { + final var exception = new ElasticsearchException("Failure"); + assertTrue(channel.config().isAutoRead()); + assertThat(netty4HttpHeaderValidator.getState(), equalTo(WAITING_TO_START)); + + final DefaultHttpRequest request = new DefaultHttpRequest(HttpVersion.HTTP_1_1, 
HttpMethod.GET, "/uri"); + + validationException.set(exception); + channel.writeInbound(request); + + assertThat(header.get(), sameInstance(request)); + assertThat(listener.get(), nullValue()); + + channel.runPendingTasks(); + assertTrue(channel.config().isAutoRead()); + DefaultHttpRequest failed = channel.readInbound(); + assertThat(failed, sameInstance(request)); + assertThat(failed.headers().get(HttpHeaderNames.CONNECTION), nullValue()); + assertTrue(failed.decoderResult().isFailure()); + Exception cause = (Exception) failed.decoderResult().cause(); + assertThat(cause, equalTo(exception)); + assertThat(netty4HttpHeaderValidator.getState(), equalTo(DROPPING_DATA_UNTIL_NEXT_REQUEST)); + + final DefaultHttpContent content = new DefaultHttpContent(Unpooled.buffer(4)); + channel.writeInbound(content); + + assertThat(channel.readInbound(), nullValue()); + assertThat(content.refCnt(), equalTo(0)); + + DefaultLastHttpContent lastContent = new DefaultLastHttpContent(Unpooled.buffer(4)); + channel.writeInbound(lastContent); + assertTrue(channel.config().isAutoRead()); + assertThat(netty4HttpHeaderValidator.getState(), equalTo(WAITING_TO_START)); + assertThat(channel.readInbound(), nullValue()); + assertThat(lastContent.refCnt(), equalTo(0)); + } + public void testValidationHandlesMultipleQueuedUpMessages() { assertTrue(channel.config().isAutoRead()); assertThat(netty4HttpHeaderValidator.getState(), equalTo(WAITING_TO_START)); From eac0ab9b6e22eb881ef2a2a6495b2a46212da668 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 15 Apr 2024 12:16:36 +0300 Subject: [PATCH 005/130] Remove trace logging, no test failures (#107452) No test failures over the past 4 days. Fixes #105485 --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 391f42999b772..5ca5da555718b 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -504,7 +504,6 @@ public void testDownsampleTwice() throws Exception { } } - @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/105485") public void testDownsampleTwiceSameInterval() throws Exception { // Create the ILM policy Request request = new Request("PUT", "_ilm/policy/" + policy); From 17d6bc210ca45729e819be3e14b80520fb912b21 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 15 Apr 2024 11:21:10 +0200 Subject: [PATCH 006/130] Fix another encoding issue in Doc unit tests (#107453) --- .../internal/doc/DocSnippetTaskSpec.groovy | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy index 96888357d8433..894e6e9b51ab8 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/DocSnippetTaskSpec.groovy @@ -529,9 +529,11 
@@ GET /_analyze { "type": "mapping", "mappings": [ - "٠ => 0", - "١ => 1", - "٢ => 2" + "e => 0", + "m => 1", + "p => 2", + "t => 3", + "y => 4" ] } ], @@ -551,9 +553,11 @@ GET /_analyze { "type": "mapping", "mappings": [ - "٠ => 0", - "١ => 1", - "٢ => 2" + "e => 0", + "m => 1", + "p => 2", + "t => 3", + "y => 4" ] } ], From ecc406edfc5a9b1713751ebe80b2e7fd9ee228c9 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 15 Apr 2024 10:35:24 +0100 Subject: [PATCH 007/130] [ML] Create default word based chunker (#107303) WordBoundaryChunker uses ICU4J to split text at word boundaries creating chunks from long inputs. The chunksize and overlap parameters are measured in words. The chunk text is then processed in batches depending on the inference services supported batch size. --- docs/changelog/107303.yaml | 5 + .../ChunkedTextEmbeddingFloatResults.java | 116 ++++++++ x-pack/plugin/inference/build.gradle | 2 + .../inference/licenses/icu4j-LICENSE.txt | 33 +++ .../inference/licenses/icu4j-NOTICE.txt | 3 + .../inference/src/main/java/module-info.java | 1 + .../InferenceNamedWriteablesProvider.java | 8 + .../common/EmbeddingRequestChunker.java | 264 +++++++++++++++++ .../inference/common/WordBoundaryChunker.java | 111 +++++++ .../services/cohere/CohereService.java | 36 +-- .../services/cohere/CohereServiceFields.java | 5 + .../services/openai/OpenAiService.java | 18 +- .../services/openai/OpenAiServiceFields.java | 5 + .../common/EmbeddingRequestChunkerTests.java | 278 ++++++++++++++++++ .../common/WordBoundaryChunkerTests.java | 221 ++++++++++++++ ...ChunkedTextEmbeddingFloatResultsTests.java | 56 ++++ .../services/cohere/CohereServiceTests.java | 132 ++------- .../services/openai/OpenAiServiceTests.java | 53 ++-- 18 files changed, 1190 insertions(+), 157 deletions(-) create mode 100644 docs/changelog/107303.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java create mode 100644 x-pack/plugin/inference/licenses/icu4j-LICENSE.txt create mode 100644 x-pack/plugin/inference/licenses/icu4j-NOTICE.txt create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java diff --git a/docs/changelog/107303.yaml b/docs/changelog/107303.yaml new file mode 100644 index 0000000000000..2e04ce6be3627 --- /dev/null +++ b/docs/changelog/107303.yaml @@ -0,0 +1,5 @@ +pr: 107303 +summary: Create default word based chunker +area: Machine Learning +type: feature +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java new file mode 100644 index 0000000000000..e1668ec34478f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedTextEmbeddingFloatResults.java @@ -0,0 +1,116 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public record ChunkedTextEmbeddingFloatResults(List chunks) implements ChunkedInferenceServiceResults { + + public static final String NAME = "chunked_text_embedding_service_float_results"; + public static final String FIELD_NAME = "text_embedding_float_chunk"; + + public ChunkedTextEmbeddingFloatResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(EmbeddingChunk::new)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + // TODO add isTruncated flag + builder.startArray(FIELD_NAME); + for (var embedding : chunks) { + embedding.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(chunks); + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the coordinated action"); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException("Chunked results are not returned in the legacy format"); + } + + @Override + public Map asMap() { + return Map.of(FIELD_NAME, chunks.stream().map(EmbeddingChunk::asMap).collect(Collectors.toList())); + } + + @Override + public String getWriteableName() { + return NAME; + } + + public List getChunks() { + return chunks; + } + + public record EmbeddingChunk(String matchedText, List embedding) implements Writeable, ToXContentObject { + + public EmbeddingChunk(StreamInput in) throws IOException { + this(in.readString(), in.readCollectionAsImmutableList(StreamInput::readFloat)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(matchedText); + out.writeCollection(embedding, StreamOutput::writeFloat); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ChunkedNlpInferenceResults.TEXT, matchedText); + + builder.startArray(ChunkedNlpInferenceResults.INFERENCE); + for (Float value : embedding) { + builder.value(value); + } + builder.endArray(); + + builder.endObject(); + return builder; + } + + public Map asMap() { + var map = new HashMap(); + map.put(ChunkedNlpInferenceResults.TEXT, matchedText); + map.put(ChunkedNlpInferenceResults.INFERENCE, embedding); + return map; + } + + @Override + public String toString() { + return Strings.toString(this); + } + } + +} diff --git 
a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index e4f4de0027073..2c473517e5aab 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -24,4 +24,6 @@ dependencies { compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) testImplementation project(':modules:reindex') + + api "com.ibm.icu:icu4j:${versions.icu4j}" } diff --git a/x-pack/plugin/inference/licenses/icu4j-LICENSE.txt b/x-pack/plugin/inference/licenses/icu4j-LICENSE.txt new file mode 100644 index 0000000000000..e76faec4ad20f --- /dev/null +++ b/x-pack/plugin/inference/licenses/icu4j-LICENSE.txt @@ -0,0 +1,33 @@ +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +provided that the above copyright notice(s) and this permission notice appear +in all copies of the Software and that both the above copyright notice(s) and +this permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE +LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not +be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property of +their respective owners. 
diff --git a/x-pack/plugin/inference/licenses/icu4j-NOTICE.txt b/x-pack/plugin/inference/licenses/icu4j-NOTICE.txt new file mode 100644 index 0000000000000..47eeab14f2ef6 --- /dev/null +++ b/x-pack/plugin/inference/licenses/icu4j-NOTICE.txt @@ -0,0 +1,3 @@ +ICU4J, (under lucene/analysis/icu) is licensed under an MIT style license +(modules/analysis/icu/lib/icu4j-LICENSE-BSD_LIKE.txt) and Copyright (c) 1995-2012 +International Business Machines Corporation and others \ No newline at end of file diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 2d25a48117778..6106600ee5f33 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -17,6 +17,7 @@ requires org.apache.httpcomponents.httpasyncclient; requires org.apache.httpcomponents.httpcore.nio; requires org.apache.lucene.core; + requires com.ibm.icu; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 21bd73c3821c4..8d01b25aa2795 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; @@ -105,6 +106,13 @@ public static List getNamedWriteables() { ChunkedTextEmbeddingResults::new ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + InferenceServiceResults.class, + ChunkedTextEmbeddingFloatResults.NAME, + ChunkedTextEmbeddingFloatResults::new + ) + ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceServiceResults.class, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java new file mode 100644 index 0000000000000..77d03ac660952 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java @@ -0,0 +1,264 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +/** + * This class creates optimally sized batches of input strings + * for batched processing splitting long strings into smaller + * chunks. Multiple inputs may be fit into a single batch or + * a single large input that has been chunked may spread over + * multiple batches. + * + * The final aspect it to gather the responses from the batch + * processing and map the results back to the original element + * in the input list. + */ +public class EmbeddingRequestChunker { + + public static final int DEFAULT_WORDS_PER_CHUNK = 250; + public static final int DEFAULT_CHUNK_OVERLAP = 100; + + private final List batchedRequests = new ArrayList<>(); + private final AtomicInteger resultCount = new AtomicInteger(); + private final int maxNumberOfInputsPerBatch; + private final int wordsPerChunk; + private final int chunkOverlap; + + private List> chunkedInputs; + private List>> results; + private AtomicArray errors; + private ActionListener> finalListener; + + public EmbeddingRequestChunker(List inputs, int maxNumberOfInputsPerBatch) { + this.maxNumberOfInputsPerBatch = maxNumberOfInputsPerBatch; + this.wordsPerChunk = DEFAULT_WORDS_PER_CHUNK; + this.chunkOverlap = DEFAULT_CHUNK_OVERLAP; + splitIntoBatchedRequests(inputs); + } + + public EmbeddingRequestChunker(List inputs, int maxNumberOfInputsPerBatch, int wordsPerChunk, int chunkOverlap) { + this.maxNumberOfInputsPerBatch = maxNumberOfInputsPerBatch; + this.wordsPerChunk = wordsPerChunk; + this.chunkOverlap = chunkOverlap; + splitIntoBatchedRequests(inputs); + } + + private void splitIntoBatchedRequests(List inputs) { + var chunker = new WordBoundaryChunker(); + chunkedInputs = new ArrayList<>(inputs.size()); + results = new ArrayList<>(inputs.size()); + errors = new AtomicArray<>(inputs.size()); + + for (int i = 0; i < inputs.size(); i++) { + var chunks = chunker.chunk(inputs.get(i), wordsPerChunk, chunkOverlap); + int numberOfSubBatches = addToBatches(chunks, i); + // size the results array with the expected number of request/responses + results.add(new AtomicArray<>(numberOfSubBatches)); + chunkedInputs.add(chunks); + } + } + + private int addToBatches(List chunks, int inputIndex) { + BatchRequest lastBatch; + if (batchedRequests.isEmpty()) { + lastBatch = new BatchRequest(new ArrayList<>()); + batchedRequests.add(lastBatch); + } else { + lastBatch = batchedRequests.get(batchedRequests.size() - 1); + } + + int freeSpace = maxNumberOfInputsPerBatch - lastBatch.size(); + assert freeSpace >= 0; + + // chunks may span multiple batches, + // the chunkIndex keeps them ordered. 
+ int chunkIndex = 0; + + if (freeSpace > 0) { + // use any free space in the previous batch before creating new batches + int toAdd = Math.min(freeSpace, chunks.size()); + lastBatch.addSubBatch(new SubBatch(chunks.subList(0, toAdd), new SubBatchPositionsAndCount(inputIndex, chunkIndex++, toAdd))); + } + + int start = freeSpace; + while (start < chunks.size()) { + int toAdd = Math.min(maxNumberOfInputsPerBatch, chunks.size() - start); + var batch = new BatchRequest(new ArrayList<>()); + batch.addSubBatch( + new SubBatch(chunks.subList(start, start + toAdd), new SubBatchPositionsAndCount(inputIndex, chunkIndex++, toAdd)) + ); + batchedRequests.add(batch); + start += toAdd; + } + + return chunkIndex; + } + + /** + * Returns a list of batched inputs and a ActionListener for each batch. + * @param finalListener The listener to call once all the batches are processed + * @return Batches and listeners + */ + public List batchRequestsWithListeners(ActionListener> finalListener) { + this.finalListener = finalListener; + + int numberOfRequests = batchedRequests.size(); + + var requests = new ArrayList(numberOfRequests); + for (var batch : batchedRequests) { + requests.add( + new BatchRequestAndListener( + batch, + new DebatchingListener( + batch.subBatches().stream().map(SubBatch::positions).collect(Collectors.toList()), + numberOfRequests + ) + ) + ); + } + + return requests; + } + + /** + * A grouping listener that calls the final listener only when + * all responses have been received. + * Long inputs that were split into chunks are reassembled and + * returned as a single chunked response. + * The listener knows where in the results array to insert the + * response so that order is preserved. + */ + private class DebatchingListener implements ActionListener { + + private final List positions; + private final int totalNumberOfRequests; + + DebatchingListener(List positions, int totalNumberOfRequests) { + this.positions = positions; + this.totalNumberOfRequests = totalNumberOfRequests; + } + + @Override + public void onResponse(InferenceServiceResults inferenceServiceResults) { + if (inferenceServiceResults instanceof TextEmbeddingResults textEmbeddingResults) { // TODO byte embeddings + int numRequests = positions.stream().mapToInt(SubBatchPositionsAndCount::embeddingCount).sum(); + if (numRequests != textEmbeddingResults.embeddings().size()) { + onFailure( + new ElasticsearchStatusException( + "Error the number of embedding responses [{}] does not equal the number of " + "requests [{}]", + RestStatus.BAD_REQUEST, + textEmbeddingResults.embeddings().size(), + numRequests + ) + ); + return; + } + + int start = 0; + for (var pos : positions) { + results.get(pos.inputIndex()) + .setOnce(pos.chunkIndex(), textEmbeddingResults.embeddings().subList(start, start + pos.embeddingCount())); + start += pos.embeddingCount(); + } + } + + if (resultCount.incrementAndGet() == totalNumberOfRequests) { + sendResponse(); + } + } + + @Override + public void onFailure(Exception e) { + var errorResult = new ErrorChunkedInferenceResults(e); + for (var pos : positions) { + errors.setOnce(pos.inputIndex(), errorResult); + } + + if (resultCount.incrementAndGet() == totalNumberOfRequests) { + sendResponse(); + } + } + + private void sendResponse() { + var response = new ArrayList(chunkedInputs.size()); + for (int i = 0; i < chunkedInputs.size(); i++) { + if (errors.get(i) != null) { + response.add(errors.get(i)); + } else { + response.add(merge(chunkedInputs.get(i), results.get(i))); + } + } + + 
+        private ChunkedTextEmbeddingFloatResults merge(
+            List<String> chunks,
+            AtomicArray<List<TextEmbeddingResults.Embedding>> debatchedResults
+        ) {
+            var all = new ArrayList<TextEmbeddingResults.Embedding>();
+            for (int i = 0; i < debatchedResults.length(); i++) {
+                var subBatch = debatchedResults.get(i);
+                all.addAll(subBatch);
+            }
+
+            assert chunks.size() == all.size();
+
+            var embeddingChunks = new ArrayList<ChunkedTextEmbeddingFloatResults.EmbeddingChunk>();
+            for (int i = 0; i < chunks.size(); i++) {
+                embeddingChunks.add(new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(chunks.get(i), all.get(i).values()));
+            }
+
+            return new ChunkedTextEmbeddingFloatResults(embeddingChunks);
+        }
+    }
+
+    public record BatchRequest(List<SubBatch> subBatches) {
+        public int size() {
+            return subBatches.stream().mapToInt(SubBatch::size).sum();
+        }
+
+        public void addSubBatch(SubBatch sb) {
+            subBatches.add(sb);
+        }
+
+        public List<String> inputs() {
+            return subBatches.stream().flatMap(s -> s.requests().stream()).collect(Collectors.toList());
+        }
+    }
+
+    public record BatchRequestAndListener(BatchRequest batch, ActionListener<InferenceServiceResults> listener) {
+
+    }
+
+    /**
+     * Used for mapping batched requests back to the original input
+     */
+    record SubBatchPositionsAndCount(int inputIndex, int chunkIndex, int embeddingCount) {}
+
+    record SubBatch(List<String> requests, SubBatchPositionsAndCount positions) {
+        public int size() {
+            return requests.size();
+        }
+    }
+}
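A minimal usage sketch of the chunker above, assuming a service simply forwards each
batch to its HTTP action; executeEmbeddingCall is a stand-in name, not part of this
patch:

    void chunkedInfer(List<String> inputs, ActionListener<List<ChunkedInferenceServiceResults>> listener) {
        // one DebatchingListener per batch; the chunker reassembles per-input results
        // and invokes the final listener exactly once, preserving input order
        var batches = new EmbeddingRequestChunker(inputs, 96).batchRequestsWithListeners(listener);
        for (var request : batches) {
            executeEmbeddingCall(request.batch().inputs(), request.listener()); // hypothetical transport call
        }
    }

The CohereService and OpenAiService changes later in this patch drive the chunker in
exactly this shape, with maximum batch sizes of 96 and 2048 respectively.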
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java
new file mode 100644
index 0000000000000..d3bb9154fd426
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.common;
+
+import com.ibm.icu.text.BreakIterator;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Breaks text into smaller strings or chunks on word boundaries.
+ * Whitespace is preserved and included at the start of the
+ * following chunk, not at the end of the chunk. If a chunk ends
+ * on a punctuation mark the punctuation is included in the
+ * next chunk.
+ *
+ * The overlap value must not be greater than (chunkSize / 2); this
+ * avoids the complexity of tracking the start positions of
+ * multiple chunks within a single chunk.
+ */
+public class WordBoundaryChunker {
+
+    private BreakIterator wordIterator;
+
+    public WordBoundaryChunker() {
+        wordIterator = BreakIterator.getWordInstance(Locale.ROOT);
+    }
+
+    /**
+     * Break the input text into small chunks as dictated
+     * by the chunking parameters
+     * @param input Text to chunk
+     * @param chunkSize The number of words in each chunk
+     * @param overlap The number of words to overlap each chunk.
+     *                Can be 0 but must be non-negative.
+     * @return List of chunked text
+     */
+    public List<String> chunk(String input, int chunkSize, int overlap) {
+        if (overlap > 0 && overlap > chunkSize / 2) {
+            throw new IllegalArgumentException(
+                "Invalid chunking parameters, overlap ["
+                    + overlap
+                    + "] must be < chunk size / 2 ["
+                    + chunkSize
+                    + " / 2 = "
+                    + chunkSize / 2
+                    + "]"
+            );
+        }
+
+        if (overlap < 0) {
+            throw new IllegalArgumentException("Invalid chunking parameters, overlap [" + overlap + "] must be >= 0");
+        }
+
+        if (input.isEmpty()) {
+            return List.of("");
+        }
+
+        var chunks = new ArrayList<String>();
+
+        // This position in the chunk is where the next overlapping chunk will start
+        final int chunkSizeLessOverlap = chunkSize - overlap;
+        // includes the count of words from the overlap portion in the previous chunk
+        int wordsInChunkCountIncludingOverlap = 0;
+        int nextWindowStart = 0;
+        int windowStart = 0;
+        int wordsSinceStartWindowWasMarked = 0;
+
+        wordIterator.setText(input);
+        int boundary = wordIterator.next();
+
+        while (boundary != BreakIterator.DONE) {
+            if (wordIterator.getRuleStatus() != BreakIterator.WORD_NONE) {
+                wordsInChunkCountIncludingOverlap++;
+                wordsSinceStartWindowWasMarked++;
+
+                if (wordsInChunkCountIncludingOverlap >= chunkSize) {
+                    chunks.add(input.substring(windowStart, boundary));
+                    wordsInChunkCountIncludingOverlap = overlap;
+
+                    if (overlap == 0) {
+                        nextWindowStart = boundary;
+                    }
+
+                    windowStart = nextWindowStart;
+                }
+
+                if (wordsSinceStartWindowWasMarked == chunkSizeLessOverlap) {
+                    nextWindowStart = boundary;
+                    wordsSinceStartWindowWasMarked = 0;
+                }
+            }
+            boundary = wordIterator.next();
+        }
+
+        // Get the last chunk that was shorter than the required chunk size
+        // if it ends on a boundary then the count should equal overlap, in which case
+        // we can ignore it, unless this is the first chunk in which case we want to add it
+        if (wordsInChunkCountIncludingOverlap > overlap || chunks.isEmpty()) {
+            chunks.add(input.substring(windowStart));
+        }
+
+        return chunks;
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
index d6050e8d91754..9dec42ed59527 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
@@ -23,12 +23,7 @@
 import org.elasticsearch.inference.SimilarityMeasure;
 import org.elasticsearch.inference.TaskType;
 import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults;
-import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults;
-import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults;
-import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults;
-import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults;
-import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults;
+import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker;
 import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator;
 import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender;
@@ -45,12 +40,12 @@
 import java.util.Map;
 import java.util.Set;
 
-import static
org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.cohere.CohereServiceFields.EMBEDDING_MAX_BATCH_SIZE; public class CohereService extends SenderService { public static final String NAME = "cohere"; @@ -229,25 +224,18 @@ protected void doChunkedInfer( TimeValue timeout, ActionListener> listener ) { - ActionListener inferListener = listener.delegateFailureAndWrap( - (delegate, response) -> delegate.onResponse(translateToChunkedResults(input, response)) - ); + if (model instanceof CohereModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } - doInfer(model, input, taskSettings, inputType, timeout, inferListener); - } + CohereModel cohereModel = (CohereModel) model; + var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); - private static List translateToChunkedResults( - List inputs, - InferenceServiceResults inferenceResults - ) { - if (inferenceResults instanceof TextEmbeddingResults textEmbeddingResults) { - return ChunkedTextEmbeddingResults.of(inputs, textEmbeddingResults); - } else if (inferenceResults instanceof TextEmbeddingByteResults textEmbeddingByteResults) { - return ChunkedTextEmbeddingByteResults.of(inputs, textEmbeddingByteResults); - } else if (inferenceResults instanceof ErrorInferenceResults error) { - return List.of(new ErrorChunkedInferenceResults(error.getException())); - } else { - throw createInvalidChunkedResultException(inferenceResults.getWriteableName()); + var batchedRequests = new EmbeddingRequestChunker(input, EMBEDDING_MAX_BATCH_SIZE).batchRequestsWithListeners(listener); + for (var request : batchedRequests) { + var action = cohereModel.accept(actionCreator, taskSettings, inputType); + action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, request.listener()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java index 807520637f971..a31b0bf592342 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceFields.java @@ -9,4 +9,9 @@ public class CohereServiceFields { public static final String TRUNCATE = "truncate"; + + /** + * Taken from https://docs.cohere.com/reference/embed + */ + static final int EMBEDDING_MAX_BATCH_SIZE = 96; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index b6b29115620a1..d0981858a3ae9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; @@ -50,6 +51,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.EMBEDDING_MAX_BATCH_SIZE; public class OpenAiService extends SenderService { public static final String NAME = "openai"; @@ -232,11 +234,19 @@ protected void doChunkedInfer( TimeValue timeout, ActionListener> listener ) { - ActionListener inferListener = listener.delegateFailureAndWrap( - (delegate, response) -> delegate.onResponse(translateToChunkedResults(input, response)) - ); + if (model instanceof OpenAiModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } - doInfer(model, input, taskSettings, inputType, timeout, inferListener); + OpenAiModel openAiModel = (OpenAiModel) model; + var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); + + var batchedRequests = new EmbeddingRequestChunker(input, EMBEDDING_MAX_BATCH_SIZE).batchRequestsWithListeners(listener); + for (var request : batchedRequests) { + var action = openAiModel.accept(actionCreator, taskSettings); + action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, request.listener()); + } } private static List translateToChunkedResults( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java index bafe1b031b028..ca2bc56866aa5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java @@ -13,4 +13,9 @@ public class OpenAiServiceFields { public static final String ORGANIZATION = "organization_id"; + /** + * Taken from https://platform.openai.com/docs/api-reference/embeddings/create + */ + static final int EMBEDDING_MAX_BATCH_SIZE = 2048; + } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java new file mode 100644 index 0000000000000..be80008f10b44 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; + +public class EmbeddingRequestChunkerTests extends ESTestCase { + + public void testShortInputsAreSingleBatch() { + String input = "one chunk"; + + var batches = new EmbeddingRequestChunker(List.of(input), 100, 100, 10).batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), contains(input)); + } + + public void testMultipleShortInputsAreSingleBatch() { + List inputs = List.of("1st small", "2nd small", "3rd small"); + + var batches = new EmbeddingRequestChunker(inputs, 100, 100, 10).batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(1)); + assertEquals(batches.get(0).batch().inputs(), inputs); + var subBatches = batches.get(0).batch().subBatches(); + for (int i = 0; i < inputs.size(); i++) { + var subBatch = subBatches.get(i); + assertThat(subBatch.requests(), contains(inputs.get(i))); + assertEquals(0, subBatch.positions().chunkIndex()); + assertEquals(i, subBatch.positions().inputIndex()); + assertEquals(1, subBatch.positions().embeddingCount()); + } + } + + public void testManyInputsMakeManyBatches() { + int maxNumInputsPerBatch = 10; + int numInputs = maxNumInputsPerBatch * 3 + 1; // requires 4 batches + var inputs = new ArrayList(); + // + for (int i = 0; i < numInputs; i++) { + inputs.add("input " + i); + } + + var batches = new EmbeddingRequestChunker(inputs, maxNumInputsPerBatch, 100, 10).batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(4)); + assertThat(batches.get(0).batch().inputs(), hasSize(maxNumInputsPerBatch)); + assertThat(batches.get(1).batch().inputs(), hasSize(maxNumInputsPerBatch)); + assertThat(batches.get(2).batch().inputs(), hasSize(maxNumInputsPerBatch)); + assertThat(batches.get(3).batch().inputs(), hasSize(1)); + + assertEquals("input 0", batches.get(0).batch().inputs().get(0)); + assertEquals("input 9", batches.get(0).batch().inputs().get(9)); + assertThat( + batches.get(1).batch().inputs(), + contains("input 10", "input 11", "input 12", "input 13", "input 14", "input 15", "input 16", "input 17", "input 18", "input 19") + ); + assertEquals("input 20", batches.get(2).batch().inputs().get(0)); + assertEquals("input 29", batches.get(2).batch().inputs().get(9)); + assertThat(batches.get(3).batch().inputs(), contains("input 30")); + + int inputIndex = 0; + var subBatches = batches.get(0).batch().subBatches(); + for (int i = 0; i < batches.size(); i++) { + var subBatch = subBatches.get(i); + assertThat(subBatch.requests(), contains(inputs.get(i))); + assertEquals(0, subBatch.positions().chunkIndex()); + assertEquals(inputIndex, subBatch.positions().inputIndex()); + 
assertEquals(1, subBatch.positions().embeddingCount()); + inputIndex++; + } + } + + public void testLongInputChunkedOverMultipleBatches() { + int batchSize = 5; + int chunkSize = 20; + int overlap = 0; + // passage will be chunked into batchSize + 1 parts + // and spread over 2 batch requests + int numberOfWordsInPassage = (chunkSize * batchSize) + 5; + + var passageBuilder = new StringBuilder(); + for (int i = 0; i < numberOfWordsInPassage; i++) { + passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace + } + + List inputs = List.of("1st small", passageBuilder.toString(), "2nd small", "3rd small"); + + var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(2)); + { + var batch = batches.get(0).batch(); + assertThat(batch.inputs(), hasSize(batchSize)); + assertEquals(batchSize, batch.size()); + assertThat(batch.subBatches(), hasSize(2)); + { + var subBatch = batch.subBatches().get(0); + assertEquals(0, subBatch.positions().inputIndex()); + assertEquals(0, subBatch.positions().chunkIndex()); + assertEquals(1, subBatch.positions().embeddingCount()); + assertThat(subBatch.requests(), contains("1st small")); + } + { + var subBatch = batch.subBatches().get(1); + assertEquals(1, subBatch.positions().inputIndex()); // 2nd input + assertEquals(0, subBatch.positions().chunkIndex()); // 1st part of the 2nd input + assertEquals(4, subBatch.positions().embeddingCount()); // 4 chunks + assertThat(subBatch.requests().get(0), startsWith("passage_input0 ")); + assertThat(subBatch.requests().get(1), startsWith(" passage_input20 ")); + assertThat(subBatch.requests().get(2), startsWith(" passage_input40 ")); + assertThat(subBatch.requests().get(3), startsWith(" passage_input60 ")); + } + } + { + var batch = batches.get(1).batch(); + assertThat(batch.inputs(), hasSize(4)); + assertEquals(4, batch.size()); + assertThat(batch.subBatches(), hasSize(3)); + { + var subBatch = batch.subBatches().get(0); + assertEquals(1, subBatch.positions().inputIndex()); // 2nd input + assertEquals(1, subBatch.positions().chunkIndex()); // 2nd part of the 2nd input + assertEquals(2, subBatch.positions().embeddingCount()); + assertThat(subBatch.requests().get(0), startsWith(" passage_input80 ")); + assertThat(subBatch.requests().get(1), startsWith(" passage_input100 ")); + } + { + var subBatch = batch.subBatches().get(1); + assertEquals(2, subBatch.positions().inputIndex()); // 3rd input + assertEquals(0, subBatch.positions().chunkIndex()); // 1st and only part + assertEquals(1, subBatch.positions().embeddingCount()); // 1 chunk + assertThat(subBatch.requests(), contains("2nd small")); + } + { + var subBatch = batch.subBatches().get(2); + assertEquals(3, subBatch.positions().inputIndex()); // 4th input + assertEquals(0, subBatch.positions().chunkIndex()); // 1st and only part + assertEquals(1, subBatch.positions().embeddingCount()); // 1 chunk + assertThat(subBatch.requests(), contains("3rd small")); + } + } + } + + public void testMergingListener() { + int batchSize = 5; + int chunkSize = 20; + int overlap = 0; + // passage will be chunked into batchSize + 1 parts + // and spread over 2 batch requests + int numberOfWordsInPassage = (chunkSize * batchSize) + 5; + + var passageBuilder = new StringBuilder(); + for (int i = 0; i < numberOfWordsInPassage; i++) { + passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace + } + List inputs = List.of("1st small", 
passageBuilder.toString(), "2nd small", "3rd small"); + + var finalListener = testListener(); + var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); + assertThat(batches, hasSize(2)); + + // 4 inputs in 2 batches + { + var embeddings = new ArrayList(); + for (int i = 0; i < batchSize; i++) { + embeddings.add(new TextEmbeddingResults.Embedding(List.of(randomFloat()))); + } + batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings)); + } + { + var embeddings = new ArrayList(); + for (int i = 0; i < 4; i++) { // 4 requests in the 2nd batch + embeddings.add(new TextEmbeddingResults.Embedding(List.of(randomFloat()))); + } + batches.get(1).listener().onResponse(new TextEmbeddingResults(embeddings)); + } + + assertNotNull(finalListener.results); + assertThat(finalListener.results, hasSize(4)); + { + var chunkedResult = finalListener.results.get(0); + assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedFloatResult.chunks(), hasSize(1)); + assertEquals("1st small", chunkedFloatResult.chunks().get(0).matchedText()); + } + { + // this is the large input split in multiple chunks + var chunkedResult = finalListener.results.get(1); + assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedFloatResult.chunks(), hasSize(6)); + assertThat(chunkedFloatResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); + assertThat(chunkedFloatResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); + assertThat(chunkedFloatResult.chunks().get(2).matchedText(), startsWith(" passage_input40 ")); + assertThat(chunkedFloatResult.chunks().get(3).matchedText(), startsWith(" passage_input60 ")); + assertThat(chunkedFloatResult.chunks().get(4).matchedText(), startsWith(" passage_input80 ")); + assertThat(chunkedFloatResult.chunks().get(5).matchedText(), startsWith(" passage_input100 ")); + } + { + var chunkedResult = finalListener.results.get(2); + assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedFloatResult.chunks(), hasSize(1)); + assertEquals("2nd small", chunkedFloatResult.chunks().get(0).matchedText()); + } + { + var chunkedResult = finalListener.results.get(3); + assertThat(chunkedResult, instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var chunkedFloatResult = (ChunkedTextEmbeddingFloatResults) chunkedResult; + assertThat(chunkedFloatResult.chunks(), hasSize(1)); + assertEquals("3rd small", chunkedFloatResult.chunks().get(0).matchedText()); + } + } + + public void testListenerErrorsWithWrongNumberOfResponses() { + List inputs = List.of("1st small", "2nd small", "3rd small"); + + var failureMessage = new AtomicReference(); + var listener = new ActionListener>() { + + @Override + public void onResponse(List chunkedInferenceServiceResults) { + assertThat(chunkedInferenceServiceResults.get(0), instanceOf(ErrorChunkedInferenceResults.class)); + var error = (ErrorChunkedInferenceResults) chunkedInferenceServiceResults.get(0); + failureMessage.set(error.getException().getMessage()); + } + + @Override + public void onFailure(Exception e) { + fail("expected a response with an error"); + } + }; + + var batches = new 
EmbeddingRequestChunker(inputs, 10, 100, 0).batchRequestsWithListeners(listener); + assertThat(batches, hasSize(1)); + + var embeddings = new ArrayList(); + embeddings.add(new TextEmbeddingResults.Embedding(List.of(randomFloat()))); + embeddings.add(new TextEmbeddingResults.Embedding(List.of(randomFloat()))); + batches.get(0).listener().onResponse(new TextEmbeddingResults(embeddings)); + assertEquals("Error the number of embedding responses [2] does not equal the number of requests [3]", failureMessage.get()); + } + + private ChunkedResultsListener testListener() { + return new ChunkedResultsListener(); + } + + private static class ChunkedResultsListener implements ActionListener> { + List results; + + @Override + public void onResponse(List chunkedInferenceServiceResults) { + this.results = chunkedInferenceServiceResults; + } + + @Override + public void onFailure(Exception e) { + fail(e.getMessage()); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java new file mode 100644 index 0000000000000..14cb63673e174 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; + +public class WordBoundaryChunkerTests extends ESTestCase { + + private final String TEST_TEXT = "Word segmentation is the problem of dividing a string of written language into its component words.\n" + + "In English and many other languages using some form of the Latin alphabet, the space is a good approximation of a word divider " + + "(word delimiter), although this concept has limits because of the variability with which languages emically regard collocations " + + "and compounds. Many English compound nouns are variably written (for example, ice box = ice-box = icebox; pig sty = pig-sty = " + + "pigsty) with a corresponding variation in whether speakers think of them as noun phrases or single nouns; there are trends in " + + "how norms are set, such as that open compounds often tend eventually to solidify by widespread convention, but variation remains" + + " systemic. In contrast, German compound nouns show less orthographic variation, with solidification being a stronger norm."; + + private final String[] MULTI_LINGUAL = new String[] { + "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije" + + " Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na " + + "liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. Kao žbuku između ciglā " + + "stavljali su glinu razmočenu vodom. Tim su materijalom gradili prostorije veličine do 6 četvornih metara. 
U potkrovljima " + + "su skladištili žitarice i druge plodine, dok su kive - ceremonijalne prostorije - gradili ispred soba, ali ukopane u zemlju," + + " nešto poput današnjih podruma. Kiva je bila vrhunski dizajnirana prostorija okruglog oblika s prostorom za vatru zimi te s" + + " dovodom hladnog zraka za klimatizaciju ljeti. U zidane konstrukcije stavljali su i lokalno posječena stabla, što današnjim" + + " arheolozima pomaže u preciznom datiranju nastanka pojedine građevine metodom dendrokronologije. Ta stabla pridonose i" + + " teoriji o mogućem konačnom slomu ondašnjeg društva. Nakon što su, tijekom nekoliko stoljeća, šume do kraja srušene, a " + + "njihova obnova zbog sušne klime traje i po 200 godina, nije proteklo puno vremena do konačnog urušavanja civilizacije, " + + "koja se, na svojem vrhuncu osjećala nepobjedivom. 90 % sagrađenih naseobina ispod stijena ima do deset prostorija. ⅓ od " + + "ukupnog broja sagrađenih kuća ima jednu ili dvije kamene prostorije", + "Histoarysk wie in acre in stik lân dat 40 roeden (oftewol 1 furlong of ⅛ myl of 660 foet) lang wie, en 4 roeden (of 66 foet) " + + "breed. Men is fan tinken dat dat likernôch de grûnmjitte wie dy't men mei in jok oksen yn ien dei beploegje koe.", + "創業当初の「太平洋化学工業社」から1959年太平洋化学工業株式会社へ、1987年には太平洋化学㈱に社名を変更。 1990年以降、海外拠点を増やし本格的な国際進出を始動。" + + " 創業者がつくりあげた化粧品会社を世界企業へと成長させるべく2002年3月英文社名AMOREPACIFICに改めた。", + "۱۔ ھن شق جي مطابق قادياني گروھ يا لاھوري گروھ جي ڪنھن رڪن کي جيڪو پاڻ کي 'احمدي' يا ڪنھن ٻي نالي سان پڪاري جي لاءِ ممنوع قرار " + + "ڏنو ويو آھي تہ ھو (الف) ڳالھائي، لکي يا ڪنھن ٻي طريقي سان ڪنھن خليفي يا آنحضور ﷺ جي ڪنھن صحابي کان علاوہڍه ڪنھن کي امير" + + " المومنين يا" + + " خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين " + + "چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔" }; + + public void testSingleSplit() { + var chunker = new WordBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, 10_000, 0); + assertThat(chunks, hasSize(1)); + assertEquals(TEST_TEXT, chunks.get(0)); + } + + public void testChunkSizeOnWhiteSpaceNoOverlap() { + var numWhiteSpaceSeparatedWords = TEST_TEXT.split("\\s+").length; + var chunker = new WordBoundaryChunker(); + + for (var chunkSize : new int[] { 10, 20, 100, 300 }) { + var chunks = chunker.chunk(TEST_TEXT, chunkSize, 0); + int expectedNumChunks = (numWhiteSpaceSeparatedWords + chunkSize - 1) / chunkSize; + assertThat("chunk size= " + chunkSize, chunks, hasSize(expectedNumChunks)); + } + } + + public void testMultilingual() { + var chunker = new WordBoundaryChunker(); + for (var input : MULTI_LINGUAL) { + var chunks = chunker.chunk(input, 10, 0); + assertTrue(chunks.size() > 1); + } + } + + public void testNumberOfChunks() { + for (int numWords : new int[] { 10, 22, 50, 73, 100 }) { + var sb = new StringBuilder(); + for (int i = 0; i < numWords; i++) { + sb.append(i).append(' '); + } + var whiteSpacedText = sb.toString(); + assertExpectedNumberOfChunks(whiteSpacedText, numWords, 10, 4); + assertExpectedNumberOfChunks(whiteSpacedText, numWords, 10, 2); + assertExpectedNumberOfChunks(whiteSpacedText, numWords, 20, 4); + assertExpectedNumberOfChunks(whiteSpacedText, numWords, 20, 10); + } + } + + public void testWindowSpanningWithOverlapNumWordsInOverlapSection() { + int chunkSize = 10; + int windowSize = 3; + for (int numWords : new int[] { 7, 8, 9, 10 }) { + var sb = new StringBuilder(); + for (int i = 0; i < numWords; i++) { + sb.append(i).append(' '); + 
} + var chunks = new WordBoundaryChunker().chunk(sb.toString(), chunkSize, windowSize); + assertEquals("numWords= " + numWords, 1, chunks.size()); + } + + var sb = new StringBuilder(); + for (int i = 0; i < 11; i++) { + sb.append(i).append(' '); + } + var chunks = new WordBoundaryChunker().chunk(sb.toString(), chunkSize, windowSize); + assertEquals(2, chunks.size()); + } + + public void testWindowSpanningWords() { + int numWords = randomIntBetween(4, 120); + var input = new StringBuilder(); + for (int i = 0; i < numWords; i++) { + input.append(i).append(' '); + } + var whiteSpacedText = input.toString().stripTrailing(); + + var chunks = new WordBoundaryChunker().chunk(whiteSpacedText, 20, 10); + assertChunkContents(chunks, numWords, 20, 10); + chunks = new WordBoundaryChunker().chunk(whiteSpacedText, 10, 4); + assertChunkContents(chunks, numWords, 10, 4); + chunks = new WordBoundaryChunker().chunk(whiteSpacedText, 15, 3); + assertChunkContents(chunks, numWords, 15, 3); + } + + private void assertChunkContents(List chunks, int numWords, int windowSize, int overlap) { + int start = 0; + int chunkIndex = 0; + int newWordsPerWindow = windowSize - overlap; + boolean reachedEnd = false; + while (reachedEnd == false) { + var sb = new StringBuilder(); + // the trailing whitespace from the previous chunk is + // included in this chunk + if (chunkIndex > 0) { + sb.append(" "); + } + int end = Math.min(start + windowSize, numWords); + for (int i = start; i < end; i++) { + sb.append(i).append(' '); + } + // delete the trailing whitespace + sb.deleteCharAt(sb.length() - 1); + + assertEquals("numWords= " + numWords, sb.toString(), chunks.get(chunkIndex)); + + reachedEnd = end == numWords; + start += newWordsPerWindow; + chunkIndex++; + } + + assertEquals("numWords= " + numWords, chunks.size(), chunkIndex); + } + + public void testWindowSpanning_TextShorterThanWindow() { + var sb = new StringBuilder(); + for (int i = 0; i < 8; i++) { + sb.append(i).append(' '); + } + + // window size is > num words + var chunks = new WordBoundaryChunker().chunk(sb.toString(), 10, 5); + assertThat(chunks, hasSize(1)); + } + + public void testEmptyString() { + var chunks = new WordBoundaryChunker().chunk("", 10, 5); + assertThat(chunks, contains("")); + } + + public void testWhitespace() { + var chunks = new WordBoundaryChunker().chunk(" ", 10, 5); + assertThat(chunks, contains(" ")); + } + + public void testPunctuation() { + int chunkSize = 1; + var chunks = new WordBoundaryChunker().chunk("Comma, separated", chunkSize, 0); + assertThat(chunks, contains("Comma", ", separated")); + + chunks = new WordBoundaryChunker().chunk("Mme. Thénardier", chunkSize, 0); + assertThat(chunks, contains("Mme", ". 
Thénardier")); + + chunks = new WordBoundaryChunker().chunk("Won't you chunk", chunkSize, 0); + assertThat(chunks, contains("Won't", " you", " chunk")); + + chunkSize = 10; + chunks = new WordBoundaryChunker().chunk("Won't you chunk", chunkSize, 0); + assertThat(chunks, contains("Won't you chunk")); + } + + private void assertExpectedNumberOfChunks(String input, int numWords, int windowSize, int overlap) { + var chunks = new WordBoundaryChunker().chunk(input, windowSize, overlap); + int expected = expectedNumberOfChunks(numWords, windowSize, overlap); + assertEquals(expected, chunks.size()); + } + + private int expectedNumberOfChunks(int numWords, int windowSize, int overlap) { + if (numWords < windowSize) { + return 1; + } + + // the first chunk has windowSize words, because of overlap + // the subsequent will consume fewer new words + int wordsRemainingAfterFirstChunk = numWords - windowSize; + int newWordsPerWindow = windowSize - overlap; + int numberOfFollowingChunks = (wordsRemainingAfterFirstChunk + newWordsPerWindow - 1) / newWordsPerWindow; + // the +1 accounts for the first chunk + return 1 + numberOfFollowingChunks; + } + + public void testInvalidParams() { + var chunker = new WordBoundaryChunker(); + var e = expectThrows(IllegalArgumentException.class, () -> chunker.chunk("not evaluated", 4, 10)); + assertThat(e.getMessage(), containsString("Invalid chunking parameters, overlap [10] must be < chunk size / 2 [4 / 2 = 2]")); + + e = expectThrows(IllegalArgumentException.class, () -> chunker.chunk("not evaluated", 10, 6)); + assertThat(e.getMessage(), containsString("Invalid chunking parameters, overlap [6] must be < chunk size / 2 [10 / 2 = 5]")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java new file mode 100644 index 0000000000000..9b18f5536713e --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class ChunkedTextEmbeddingFloatResultsTests extends AbstractWireSerializingTestCase { + + public static ChunkedTextEmbeddingFloatResults createRandomResults() { + int numChunks = randomIntBetween(1, 5); + var chunks = new ArrayList(numChunks); + + for (int i = 0; i < numChunks; i++) { + chunks.add(createRandomChunk()); + } + + return new ChunkedTextEmbeddingFloatResults(chunks); + } + + private static ChunkedTextEmbeddingFloatResults.EmbeddingChunk createRandomChunk() { + int columns = randomIntBetween(1, 10); + List floats = new ArrayList<>(columns); + + for (int i = 0; i < columns; i++) { + floats.add(randomFloat()); + } + + return new ChunkedTextEmbeddingFloatResults.EmbeddingChunk(randomAlphaOfLength(6), floats); + } + + @Override + protected Writeable.Reader instanceReader() { + return ChunkedTextEmbeddingFloatResults::new; + } + + @Override + protected ChunkedTextEmbeddingFloatResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected ChunkedTextEmbeddingFloatResults mutateInstance(ChunkedTextEmbeddingFloatResults instance) throws IOException { + return null; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 1e2498abfe416..e75dfc4ec798e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -32,9 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingByteResults; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -63,7 +61,6 @@ import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; @@ -1162,11 +1159,12 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec } } - public 
void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOException { + public void testChunkedInfer_BatchesCalls() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + // Batching will call the service with 2 inputs String responseJson = """ { "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", @@ -1178,6 +1176,10 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti [ 0.123, -0.123 + ], + [ + 0.223, + -0.223 ] ] }, @@ -1204,9 +1206,10 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti null ); PlainActionFuture> listener = new PlainActionFuture<>(); + // 2 inputs service.chunkedInfer( model, - List.of("abc"), + List.of("foo", "bar"), new HashMap<>(), InputType.UNSPECIFIED, new ChunkingOptions(null, null), @@ -1214,25 +1217,23 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti listener ); - var result = listener.actionGet(TIMEOUT).get(0); - assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class)); + var results = listener.actionGet(TIMEOUT); + assertThat(results, hasSize(2)); + { + assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(floatResult.chunks(), hasSize(1)); + assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(List.of(0.123f, -0.123f), floatResult.chunks().get(0).embedding()); + } + { + assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class)); + var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(floatResult.chunks(), hasSize(1)); + assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(List.of(0.223f, -0.223f), floatResult.chunks().get(0).embedding()); + } - MatcherAssert.assertThat( - asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result), - Matchers.is( - Map.of( - ChunkedTextEmbeddingResults.FIELD_NAME, - List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((double) 0.123f, (double) -0.123f) - ) - ) - ) - ) - ); MatcherAssert.assertThat(webServer.requests(), hasSize(1)); assertNull(webServer.requests().get(0).getUri().getQuery()); MatcherAssert.assertThat( @@ -1244,92 +1245,13 @@ public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOExcepti var requestMap = entityAsMap(webServer.requests().get(0).getBody()); MatcherAssert.assertThat( requestMap, - is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("float"))) + is(Map.of("texts", List.of("foo", "bar"), "model", "model", "embedding_types", List.of("float"))) ); } } public void testChunkedInfer_CallsInfer_ConvertsByteResponse() throws IOException { - var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); - - try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { - - String responseJson = """ - { - "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", - "texts": [ - "hello" - ], - "embeddings": { - "int8": [ - [ - 12, - -12 - ] - ] - }, - "meta": { - "api_version": { - "version": "1" - }, - "billed_units": { - "input_tokens": 1 - } - }, - "response_type": "embeddings_by_type" - } - """; - webServer.enqueue(new 
MockResponse().setResponseCode(200).setBody(responseJson)); - - var model = CohereEmbeddingsModelTests.createModel( - getUrl(webServer), - "secret", - new CohereEmbeddingsTaskSettings(null, null), - 1024, - 1024, - "model", - CohereEmbeddingType.INT8 - ); - PlainActionFuture> listener = new PlainActionFuture<>(); - service.chunkedInfer( - model, - List.of("abc"), - new HashMap<>(), - InputType.UNSPECIFIED, - new ChunkingOptions(null, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - listener - ); - - var result = listener.actionGet(TIMEOUT).get(0); - - MatcherAssert.assertThat( - result.asMap(), - Matchers.is( - Map.of( - ChunkedTextEmbeddingByteResults.FIELD_NAME, - List.of( - Map.of( - ChunkedNlpInferenceResults.TEXT, - "abc", - ChunkedNlpInferenceResults.INFERENCE, - List.of((byte) 12, (byte) -12) - ) - ) - ) - ) - ); - MatcherAssert.assertThat(webServer.requests(), hasSize(1)); - assertNull(webServer.requests().get(0).getUri().getQuery()); - MatcherAssert.assertThat( - webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), - equalTo(XContentType.JSON.mediaType()) - ); - MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); - - var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("int8")))); - } + // TODO byte response not implemented yet } private Map getRequestConfigMap( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 902b81250dafc..70d7181106810 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -31,8 +31,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; -import org.elasticsearch.xpack.core.ml.inference.results.ChunkedNlpInferenceResults; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; @@ -60,7 +59,6 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; -import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; @@ -1213,11 +1211,12 @@ public void testMoveModelFromTaskToServiceSettings_AlreadyMoved() { assertEquals("model", serviceSettings.get(ServiceFields.MODEL_ID)); 
}
 
-    public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOException {
+    public void testChunkedInfer_Batches() throws IOException {
         var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
 
         try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) {
+            // response with 2 embeddings
             String responseJson = """
                 {
                   "object": "list",
                   "data": [
                       {
                           "object": "embedding",
                           "index": 0,
                           "embedding": [
                               0.123,
                               -0.123
                           ]
+                      },
+                      {
+                          "object": "embedding",
+                          "index": 1,
+                          "embedding": [
+                              0.223,
+                              -0.223
+                          ]
                       }
                   ],
                   "model": "text-embedding-ada-002-v2",
@@ -1244,7 +1251,7 @@
             PlainActionFuture<List<ChunkedInferenceServiceResults>> listener = new PlainActionFuture<>();
             service.chunkedInfer(
                 model,
-                List.of("abc"),
+                List.of("foo", "bar"),
                 new HashMap<>(),
                 InputType.INGEST,
                 new ChunkingOptions(null, null),
@@ -1252,25 +1259,23 @@
                 listener
             );
 
-            var result = listener.actionGet(TIMEOUT).get(0);
-            assertThat(result, CoreMatchers.instanceOf(ChunkedTextEmbeddingResults.class));
+            var results = listener.actionGet(TIMEOUT);
+            assertThat(results, hasSize(2));
+            {
+                assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class));
+                var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(0);
+                assertThat(floatResult.chunks(), hasSize(1));
+                assertEquals("foo", floatResult.chunks().get(0).matchedText());
+                assertEquals(List.of(0.123f, -0.123f), floatResult.chunks().get(0).embedding());
+            }
+            {
+                assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedTextEmbeddingFloatResults.class));
+                var floatResult = (ChunkedTextEmbeddingFloatResults) results.get(1);
+                assertThat(floatResult.chunks(), hasSize(1));
+                assertEquals("bar", floatResult.chunks().get(0).matchedText());
+                assertEquals(List.of(0.223f, -0.223f), floatResult.chunks().get(0).embedding());
+            }
 
-            assertThat(
-                asMapWithListsInsteadOfArrays((ChunkedTextEmbeddingResults) result),
-                Matchers.is(
-                    Map.of(
-                        ChunkedTextEmbeddingResults.FIELD_NAME,
-                        List.of(
-                            Map.of(
-                                ChunkedNlpInferenceResults.TEXT,
-                                "abc",
-                                ChunkedNlpInferenceResults.INFERENCE,
-                                List.of((double) 0.123f, (double) -0.123f)
-                            )
-                        )
-                    )
-                )
-            );
             assertThat(webServer.requests(), hasSize(1));
             assertNull(webServer.requests().get(0).getUri().getQuery());
             assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType()));
@@ -1279,7 +1284,7 @@
 
             var requestMap = entityAsMap(webServer.requests().get(0).getBody());
             assertThat(requestMap.size(), Matchers.is(3));
-            assertThat(requestMap.get("input"), Matchers.is(List.of("abc")));
+            assertThat(requestMap.get("input"), Matchers.is(List.of("foo", "bar")));
             assertThat(requestMap.get("model"), Matchers.is("model"));
             assertThat(requestMap.get("user"), Matchers.is("user"));
         }

From 119f81f04305b91b9bfe9866525888ef17d35423 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer
Date: Mon, 15 Apr 2024 13:08:31 +0200
Subject: [PATCH 008/130] [Profiling] Add CO2/costs for TopN on top-level (#107304)

With this commit we add CO2 emission and cost information on the top-level
of the TopN functions API response. This is needed by the UI to show
summary info.
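A minimal sketch (not part of the original patch) of the widened response constructor
and the new accessors introduced below, using the summary values asserted in the
updated builder tests; "functions" stands in for a prepared List<TopNFunction> and is
illustrative only:

    GetTopNFunctionsResponse response = new GetTopNFunctionsResponse(7L, 14L, 1.5d, 48.2d, functions);
    assert response.getAnnualCo2Tons() == 1.5d;   // summed per-function self CO2
    assert response.getAnnualCostsUsd() == 48.2d; // summed per-function self cost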
--- .../action/GetTopNFunctionsResponse.java | 22 ++++++++++++++++++- .../xpack/profiling/action/TopNFunction.java | 8 +++++++ .../TransportGetTopNFunctionsAction.java | 7 +++++- .../action/TopNFunctionsBuilderTests.java | 10 ++++++++- 4 files changed, 44 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java index b16ce6f43685f..a42e64546058c 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsResponse.java @@ -19,11 +19,21 @@ public class GetTopNFunctionsResponse extends ActionResponse implements ToXContentObject { private final long selfCount; private final long totalCount; + private final double annualCo2Tons; + private final double annualCostsUsd; private final List topNFunctions; - public GetTopNFunctionsResponse(long selfCount, long totalCount, List topNFunctions) { + public GetTopNFunctionsResponse( + long selfCount, + long totalCount, + double annualCo2Tons, + double annualCostsUsd, + List topNFunctions + ) { this.selfCount = selfCount; this.totalCount = totalCount; + this.annualCo2Tons = annualCo2Tons; + this.annualCostsUsd = annualCostsUsd; this.topNFunctions = topNFunctions; } @@ -40,6 +50,14 @@ public long getTotalCount() { return totalCount; } + public double getAnnualCo2Tons() { + return annualCo2Tons; + } + + public double getAnnualCostsUsd() { + return annualCostsUsd; + } + public List getTopN() { return topNFunctions; } @@ -49,6 +67,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("self_count", selfCount); builder.field("total_count", totalCount); + builder.field("self_annual_co2_tons", annualCo2Tons); + builder.field("self_annual_cost_usd", annualCostsUsd); builder.xContentList("topn", topNFunctions); builder.endObject(); return builder; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java index 402d2ff012839..800b006b3cc17 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TopNFunction.java @@ -123,6 +123,10 @@ public void addTotalCount(long totalCount) { this.totalCount += totalCount; } + public double getSelfAnnualCO2Tons() { + return selfAnnualCO2Tons; + } + public void addSelfAnnualCO2Tons(double co2Tons) { this.selfAnnualCO2Tons += co2Tons; } @@ -131,6 +135,10 @@ public void addTotalAnnualCO2Tons(double co2Tons) { this.totalAnnualCO2Tons += co2Tons; } + public double getSelfAnnualCostsUSD() { + return selfAnnualCostsUSD; + } + public void addSelfAnnualCostsUSD(double costs) { this.selfAnnualCostsUSD += costs; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java index e5d67c0b005e2..f8c8d24a0864b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetTopNFunctionsAction.java @@ -134,17 +134,22 @@ public GetTopNFunctionsResponse build() { functions.sort(Collections.reverseOrder()); long sumSelfCount = 0; long sumTotalCount = 0; + double sumAnnualCo2Tons = 0.0d; + double sumAnnualCostsUsd = 0.0d; + for (int i = 0; i < functions.size(); i++) { TopNFunction topNFunction = functions.get(i); topNFunction.setRank(i + 1); sumSelfCount += topNFunction.getSelfCount(); sumTotalCount += topNFunction.getTotalCount(); + sumAnnualCo2Tons += topNFunction.getSelfAnnualCO2Tons(); + sumAnnualCostsUsd += topNFunction.getSelfAnnualCostsUSD(); } // limit at the end so global stats are independent of the limit if (limit != null && limit > 0 && limit < functions.size()) { functions = functions.subList(0, limit); } - return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, functions); + return new GetTopNFunctionsResponse(sumSelfCount, sumTotalCount, sumAnnualCo2Tons, sumAnnualCostsUsd, functions); } public boolean isExists(String frameGroupID) { diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java index 26c0f066dd092..a2b6974fb2e25 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/TopNFunctionsBuilderTests.java @@ -21,6 +21,9 @@ public void testBuildFunctions() { assertEquals(7L, response.getSelfCount()); assertEquals(14L, response.getTotalCount()); + assertEquals(1.5d, response.getAnnualCo2Tons(), 0.001d); + assertEquals(48.2d, response.getAnnualCostsUsd(), 0.001d); + assertEquals(2, response.getTopN().size()); assertEquals(foo, response.getTopN().get(0)); assertEquals(bar, response.getTopN().get(1)); @@ -35,9 +38,12 @@ public void testBuildFunctionsWithLimitSmallerThanAvailableFunctionCount() { GetTopNFunctionsResponse response = builder.build(); - // total counts are independent of the limit + // total values are independent of the limit assertEquals(7L, response.getSelfCount()); assertEquals(14L, response.getTotalCount()); + assertEquals(1.5d, response.getAnnualCo2Tons(), 0.001d); + assertEquals(48.2d, response.getAnnualCostsUsd(), 0.001d); + assertEquals(1, response.getTopN().size()); assertEquals(foo, response.getTopN().get(0)); } @@ -53,6 +59,8 @@ public void testBuildFunctionsWithLimitHigherThanAvailableFunctionCount() { assertEquals(7L, response.getSelfCount()); assertEquals(14L, response.getTotalCount()); + assertEquals(1.5d, response.getAnnualCo2Tons(), 0.001d); + assertEquals(48.2d, response.getAnnualCostsUsd(), 0.001d); // still limited to the available two functions assertEquals(2, response.getTopN().size()); assertEquals(foo, response.getTopN().get(0)); From e70e5397b7395ce8ec51d7169381b8cf19a2e583 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Mon, 15 Apr 2024 13:58:22 +0200 Subject: [PATCH 009/130] Remove historical features for YAML REST tests in favor of synthetic version features (#107393) --- .../data_stream/140_data_stream_aliases.yml | 12 +- .../data_stream/170_modify_data_stream.yml | 6 +- .../lifecycle/10_explain_lifecycle.yml | 2 +- .../test/data_stream/lifecycle/20_basic.yml | 2 +- .../data_stream/lifecycle/30_not_found.yml | 8 +- .../lifecycle/40_global_retention.yml | 2 +- 
.../rest-api-spec/test/bulk/10_basic.yml | 6 +- .../test/bulk/11_dynamic_templates.yml | 4 +- .../rest-api-spec/test/bulk/90_pipeline.yml | 2 +- .../test/cat.aliases/10_basic.yml | 22 +-- .../test/cat.aliases/40_hidden.yml | 6 +- .../test/cat.allocation/10_basic.yml | 2 +- .../test/cat.indices/10_basic.yml | 8 +- .../test/cat.indices/20_hidden.yml | 10 +- .../test/cat.plugins/10_basic.yml | 2 +- .../test/cat.recovery/10_basic.yml | 4 +- .../test/cat.shards/10_basic.yml | 8 +- .../rest-api-spec/test/cat.tasks/10_basic.yml | 2 +- .../test/cat.templates/10_basic.yml | 16 +- .../cluster.allocation_explain/10_basic.yml | 4 +- .../cluster.component_template/10_basic.yml | 8 +- .../test/cluster.desired_balance/10_basic.yml | 18 +-- .../test/cluster.desired_nodes/20_dry_run.yml | 4 +- .../test/cluster.health/10_basic.yml | 4 +- .../cluster.health/30_indices_options.yml | 2 +- .../test/cluster.info/10_info_all.yml | 2 +- .../test/cluster.info/20_info_http.yml | 2 +- .../test/cluster.info/30_info_thread_pool.yml | 2 +- .../test/cluster.info/40_info_script.yml | 2 +- .../10_basic.yml | 8 +- .../test/cluster.stats/10_basic.yml | 12 +- .../cluster.stats/20_indexing_pressure.yml | 2 +- .../test/indices.create/10_basic.yml | 4 +- .../indices.get_index_template/10_basic.yml | 8 +- .../test/indices.put_mapping/10_basic.yml | 31 ++-- .../indices.update_aliases/40_must_exist.yml | 16 +- .../rest-api-spec/test/update/16_noop.yml | 4 +- .../rest/yaml/ESClientYamlSuiteTestCase.java | 6 - .../rest/yaml/YamlTestLegacyFeatures.java | 153 ------------------ .../test/enrich/50_data_stream.yml | 4 +- 40 files changed, 131 insertions(+), 289 deletions(-) delete mode 100644 test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml index 6eaa751f845a2..b9042d51deb70 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/140_data_stream_aliases.yml @@ -309,10 +309,10 @@ - is_false: ds-first.aliases.my-alias --- "Action Results with multiple matching aliases": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: ["gte_v8.14.0"] reason: "alias action results do not work until 8.14" - features: allowed_warnings + test_runner_features: ["allowed_warnings"] - do: allowed_warnings: - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" @@ -349,10 +349,10 @@ - match: { action_results.1.error.type: aliases_not_found_exception } --- "Single action result per action": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "alias action results do not work until 8.14" - features: allowed_warnings + test_runner_features: [ "allowed_warnings" ] - do: allowed_warnings: - "index template [my-template] has index patterns [log-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml index 5edcf0e61c56b..8c0e27373664d 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml @@ -91,10 +91,10 @@ --- "Modify a data stream's failure store": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "this API was released in 8.14.0" - features: allowed_warnings + test_runner_features: [ "allowed_warnings" ] - do: allowed_warnings: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml index ca579aea4b7ef..c79775c51c392 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml @@ -1,7 +1,7 @@ --- "Explain backing index lifecycle": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" features: allowed_warnings - do: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml index 3f0a91db2d7f2..ea34c6880d1f6 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml @@ -1,7 +1,7 @@ setup: - skip: features: allowed_warnings - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycles with global retention are only supported in 8.14+" - do: allowed_warnings: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml index 0687c00cac8d2..303fbddd6c19c 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml @@ -1,7 +1,7 @@ setup: - skip: features: allowed_warnings - cluster_features: ["datastream_lifecycle"] + cluster_features: ["gte_v8.11.0"] reason: "Data stream lifecycle was GA in 8.11" - do: allowed_warnings: @@ -24,7 +24,7 @@ setup: --- "Get data stream lifecycle": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" - do: @@ -49,7 +49,7 @@ setup: --- "Put data stream lifecycle does not succeed when at least one data stream does not exist": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" - do: catch: missing @@ -72,7 +72,7 @@ setup: --- "Delete data stream lifecycle does not succeed when at least one data stream does not exist": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" - do: catch: missing diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml index c4c1c4b928fcd..93df045e4568e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml @@ -1,7 +1,7 @@ setup: - skip: features: allowed_warnings - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Global retention was added in 8.14" - do: allowed_warnings: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml index 2fde1f48e93df..f4f6245603aab 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml @@ -60,7 +60,7 @@ --- "Empty _id with op_type create": - requires: - cluster_features: ["bulk_auto_id"] + cluster_features: ["gte_v7.5.0"] reason: "auto id + op type create only supported since 7.5" - do: @@ -120,7 +120,7 @@ --- "When setting require_alias flag per request": - requires: - cluster_features: ["bulk_require_alias"] + cluster_features: ["gte_v7.10.0"] reason: "require_alias flag was added in version 7.10" - do: @@ -163,7 +163,7 @@ --- "When setting require_alias flag": - requires: - cluster_features: ["bulk_require_alias"] + cluster_features: ["gte_v7.10.0"] reason: "require_alias flag was added in version 7.10" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml index 6e9502fb32f95..70cde585dded7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml @@ -2,7 +2,7 @@ "Dynamic templates": - requires: test_runner_features: ["contains"] - cluster_features: ["bulk_dynamic_template_document_parse_exception"] + cluster_features: ["gte_v8.8.0"] reason: "Exception type has changed in 8.8.0" - do: @@ -176,7 +176,7 @@ --- "Dynamic templates with op_type": - requires: - cluster_features: ["bulk_dynamic_template_op_type"] + cluster_features: ["gte_v8.6.1"] reason: "bug fixed in 8.6.1" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml index 3e919c5960278..7e541d5d13e7d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml +++
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml @@ -1,7 +1,7 @@ --- "One request has pipeline and another not": - requires: - cluster_features: ["bulk_pipeline_validate"] + cluster_features: ["gte_v7.9.1"] reason: "fixed in 7.9.1" - do: bulk: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml index fff8c3f499ba7..2e5234bd1ced1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/10_basic.yml @@ -2,7 +2,7 @@ --- "Help": - requires: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -22,7 +22,7 @@ --- "Help (pre 7.4.0)": - skip: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -52,7 +52,7 @@ --- "Simple alias": - requires: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -81,7 +81,7 @@ --- "Simple alias (pre 7.4.0)": - skip: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -109,7 +109,7 @@ --- "Complex alias": - requires: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -149,7 +149,7 @@ --- "Complex alias (pre 7.4.0)": - skip: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -270,7 +270,7 @@ --- "Column headers": - requires: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -307,7 +307,7 @@ --- "Column headers (pre 7.4.0)": - skip: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - do: @@ -374,7 +374,7 @@ --- "Alias against closed index": - requires: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" test_runner_features: ["allowed_warnings"] @@ -409,7 +409,7 @@ --- "Alias against closed index (pre 7.4.0)": - skip: - cluster_features: "cat_aliases_show_write_index" + cluster_features: "gte_v7.4.0" reason: "is_write_index is shown in cat.aliases starting version 7.4.0" - requires: @@ -488,7 +488,7 @@ --- "Deprecated local parameter": - requires: - cluster_features: ["cat_aliases_local_deprecated"] + cluster_features: ["gte_v8.12.0"] test_runner_features: ["warnings"] reason: verifying deprecation warnings from 8.12.0 onwards diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/40_hidden.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/40_hidden.yml index 663668fb4b7af..a990524ed287e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/40_hidden.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.aliases/40_hidden.yml @@ -1,7 +1,7 @@ --- "Test cat aliases output with a hidden index with a hidden alias": - requires: - cluster_features: ["cat_aliases_hidden"] + cluster_features: ["gte_v7.7.0"] reason: "hidden indices and aliases were added in 7.7.0" - do: @@ -58,7 +58,7 @@ --- "Test cat aliases output with a hidden index with a visible alias": - requires: - cluster_features: ["cat_aliases_hidden"] + cluster_features: ["gte_v7.7.0"] reason: "hidden indices and aliases were added in 7.7.0" - do: @@ -105,7 +105,7 @@ --- "Test cat aliases output with a visible index with a hidden alias": - requires: - cluster_features: ["cat_aliases_hidden"] + cluster_features: ["gte_v7.7.0"] reason: "hidden indices and aliases were added in 7.7.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.allocation/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.allocation/10_basic.yml index 4f18b331325a4..0d660cbb6b048 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.allocation/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.allocation/10_basic.yml @@ -262,7 +262,7 @@ "Node roles": - requires: - cluster_features: ["cat_allocation_node_role"] + cluster_features: ["gte_v8.10.0"] reason: "node.role column added in 8.10.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml index 981a934a719ca..d687462df5872 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml @@ -10,7 +10,7 @@ --- "Test cat indices output": - requires: - cluster_features: ["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: dataset size was added in 8.11.0 - do: @@ -72,7 +72,7 @@ "Test cat indices output for closed index (pre 7.2.0)": - skip: reason: "closed indices are replicated starting version 7.2.0" - cluster_features: ["indices_replicate_closed"] + cluster_features: ["gte_v7.2.0"] - requires: test_runner_features: ["allowed_warnings"] @@ -117,7 +117,7 @@ "Test cat indices output for closed index": - skip: reason: "closed indices are replicated starting version 7.2.0" - cluster_features: ["indices_replicate_closed"] + cluster_features: ["gte_v7.2.0"] - requires: test_runner_features: ["allowed_warnings"] @@ -310,7 +310,7 @@ --- "Test cat indices with invalid health parameter": - requires: - cluster_features: ["cat_indices_validate_health_param"] + cluster_features: ["gte_v7.8.0"] reason: "fixed in 7.7.1+" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/20_hidden.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/20_hidden.yml index 49c319f9ba425..619d2d5374b99 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/20_hidden.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/20_hidden.yml @@ -1,7 +1,7 @@ --- "Test cat indices output for hidden index": - requires: - cluster_features: ["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: "dataset size was added in 8.11.0" - do: indices.create: @@ -42,7 +42,7 @@ --- "Test cat indices output for dot-hidden index and dot-prefixed pattern": - requires: - cluster_features: 
["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: "dataset size was added in 8.11.0" - do: indices.create: @@ -81,7 +81,7 @@ --- "Test cat indices output with a hidden index with a visible alias": - requires: - cluster_features: ["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: "dataset size was added in 8.11.0" - do: @@ -146,7 +146,7 @@ --- "Test cat indices output with a hidden index with a hidden alias": - requires: - cluster_features: ["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: "dataset size was added in 8.11.0" - do: @@ -210,7 +210,7 @@ --- "Test cat indices output with a hidden index, dot-hidden alias and dot pattern": - requires: - cluster_features: ["cat_indices_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: "dataset size was added in 8.11.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.plugins/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.plugins/10_basic.yml index d5067e4d80d89..4d698f4810024 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.plugins/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.plugins/10_basic.yml @@ -1,7 +1,7 @@ --- "Help": - requires: - cluster_features: ["cat_plugins_new_format"] + cluster_features: ["gte_v7.12.0"] reason: output format changed in 7.12.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml index e06435c4736d8..21b307c42398d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml @@ -1,7 +1,7 @@ --- "Test cat recovery output": - requires: - cluster_features: ["cat_recovery_new_bytes_format"] + cluster_features: ["gte_v8.0.0"] reason: format of bytes output changed in 8.0.0 - do: @@ -82,7 +82,7 @@ --- "Test cat recovery output for closed index": - requires: - cluster_features: ["cat_recovery_new_bytes_format"] + cluster_features: ["gte_v8.0.0"] reason: format of bytes output changed in 8.0.0 test_runner_features: ["allowed_warnings"] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.shards/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.shards/10_basic.yml index d1bd0d7627f49..b4147bcfc676e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.shards/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.shards/10_basic.yml @@ -1,7 +1,7 @@ --- "Help": - requires: - cluster_features: ["cat_shards_dataset_size"] + cluster_features: ["gte_v8.11.0"] reason: dataset size was added in 8.11.0 - do: cat.shards: @@ -91,7 +91,7 @@ --- "Test cat shards output": - requires: - cluster_features: [ "cat_shards_dataset_size" ] + cluster_features: [ "gte_v8.11.0" ] reason: dataset size was added in 8.11.0 - do: @@ -188,7 +188,7 @@ --- "Test cat shards sort": - requires: - cluster_features: [ "cat_shards_dataset_size" ] + cluster_features: [ "gte_v8.11.0" ] reason: dataset size was added in 8.11.0 - do: @@ -241,7 +241,7 @@ --- "Test cat shards with hidden indices": - requires: - cluster_features: ["cat_shards_fix_hidden_indices"] + cluster_features: ["gte_v8.3.0"] reason: hidden indices were misreported in versions before 8.3.0 - do: diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.tasks/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.tasks/10_basic.yml index b0007870f1d74..05c65eb64c31b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.tasks/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.tasks/10_basic.yml @@ -21,7 +21,7 @@ --- "Test cat tasks output with X-Opaque-Id": - requires: - cluster_features: ["cat_tasks_x_opaque_id"] + cluster_features: ["gte_v7.10.0"] reason: support for opaque_id was added in 7.10.0 test_runner_features: ["headers"] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml index 5270d215f8cea..f217834e62a5b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -1,7 +1,7 @@ --- "Help": - requires: - cluster_features: ["templates_v2"] + cluster_features: ["gte_v7.8.0"] reason: "format changed in 7.8 to accommodate V2 index templates" - do: @@ -32,7 +32,7 @@ --- "Normal templates": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" - do: @@ -83,7 +83,7 @@ --- "Filtered templates": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" - do: @@ -125,7 +125,7 @@ --- "Column headers": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" - do: @@ -163,7 +163,7 @@ --- "Select columns": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" - do: @@ -197,7 +197,7 @@ --- "Sort templates": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -250,7 +250,7 @@ --- "Multiple template": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -286,7 +286,7 @@ --- "Mixture of legacy and composable templates": - requires: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "format changed in 7.8 to accommodate V2 index templates" test_runner_features: allowed_warnings diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml index 8c350b50a6bf2..d045775d695b4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml @@ -50,7 +50,7 @@ --- "Cluster shard allocation explanation test with a closed index": - requires: - cluster_features: ["indices_replicate_closed"] + cluster_features: ["gte_v7.2.0"] reason: closed indices are replicated starting version 7.2.0 test_runner_features: ["allowed_warnings"] @@ -96,7
+96,7 @@ --- "Cluster allocation explanation response includes node's roles": - requires: - cluster_features: ["cluster_allocation_role"] + cluster_features: ["gte_v8.11.0"] reason: The roles field was introduced in 8.11.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index dcf883e5f4f14..af2d6f946d2ff 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -1,7 +1,7 @@ --- "Basic CRUD": - requires: - cluster_features: ["templates_v2"] + cluster_features: ["gte_v7.8.0"] reason: "index/component template v2 API unavailable before 7.8" - do: @@ -49,7 +49,7 @@ --- "Delete multiple templates": - requires: - cluster_features: ["cluster_templates_delete_multiple"] + cluster_features: ["gte_v8.0.0"] reason: "not yet backported" - do: @@ -117,7 +117,7 @@ --- "Add data stream lifecycle": - requires: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with global retention was available from 8.14" - do: @@ -147,7 +147,7 @@ --- "Get data stream lifecycle with default rollover": - requires: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was available from 8.14" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index cd213ebe72a8e..edb167ddbdf6e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -1,7 +1,7 @@ --- setup: - requires: - cluster_features: ["cluster_desired_balance"] + cluster_features: ["gte_v8.6.0"] reason: "API added in 8.6.0" --- @@ -62,7 +62,7 @@ setup: "Test cluster_balance_stats": - requires: - cluster_features: ["cluster_desired_balance_stats"] + cluster_features: ["gte_v8.7.0"] reason: "cluster_balance_stats added in 8.7.0" - do: @@ -110,7 +110,7 @@ setup: "Test cluster_info": - requires: - cluster_features: ["cluster_info"] + cluster_features: [gte_v8.8.0] reason: "cluster_info added in 8.8.0" - do: @@ -122,7 +122,7 @@ setup: "Test cluster_balance_stats contains node ID and roles": - requires: - cluster_features: ["cluster_desired_balance_extended"] + cluster_features: ["gte_v8.8.0"] reason: "node_id and roles added in 8.8.0" - do: @@ -141,7 +141,7 @@ setup: "Test tier_preference": - requires: - cluster_features: ["cluster_desired_balance_extended"] + cluster_features: ["gte_v8.8.0"] reason: "tier_preference added in 8.8.0" - do: @@ -166,7 +166,7 @@ setup: "Test computed_shard_movements": - requires: - cluster_features: ["cluster_desired_balance_extended"] + cluster_features: ["gte_v8.8.0"] reason: "computed_shard_movements added in 8.8.0" - do: @@ -178,7 +178,7 @@ setup: "Test reset desired balance": - requires: - cluster_features: ["cluster_desired_balance_extended"] + cluster_features: ["gte_v8.8.0"] reason: "reset API added in 8.8.0" - do: @@ -188,7 +188,7 @@ setup: "Test undesired_shard_allocation_count": - requires: - cluster_features: ["cluster_desired_balance_stats_undesired_count"] + cluster_features: ["gte_v8.12.0"] reason: "undesired_shard_allocation_count added in 8.12.0" - do: @@ -226,7 +226,7 @@ setup: "Test unassigned_shards, total_allocations, undesired_allocations and undesired_allocations_fraction": - requires: - cluster_features: [ "cluster_desired_balance_stats_undesired_count" ] + cluster_features: [ "gte_v8.12.0" ] reason: "undesired_shard_allocation_count added in 8.12.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml index ac9a3437df0c1..87fc015347348 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml @@ -1,7 +1,7 @@ --- setup: - requires: - cluster_features: ["cluster_desired_nodes_dry_run"] + cluster_features: ["gte_v8.4.0"] reason: "Support for the dry run option was added in 8.4.0" --- teardown: @@ -94,7 +94,7 @@ teardown: --- "Test validation works for dry run updates": - skip: - cluster_features: ["cluster_desired_nodes_no_settings_validation"] + cluster_features: ["gte_v8.10.0"] reason: "We started skipping setting validations in 8.10" - do: cluster.state: { } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml index 586bd3b7cfb6b..a01b68e96bbd2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml @@ -132,7 +132,7 @@ --- "cluster health with closed index (pre 7.2.0)": - skip: - cluster_features: ["indices_replicate_closed"] + cluster_features: ["gte_v7.2.0"] reason: "closed indices are replicated starting version 7.2.0" - requires: test_runner_features: ["allowed_warnings"] @@ -210,7 +210,7 @@ --- "cluster health with closed index": - requires: - cluster_features: ["indices_replicate_closed"] + cluster_features: ["gte_v7.2.0"] reason: "closed indices are replicated starting version 7.2.0" test_runner_features: ["allowed_warnings", "default_shards"] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml index 5caa0ebad30b2..8756b35569135 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml @@ -35,7 +35,7 @@ setup: --- "cluster health with expand_wildcards": - requires: - cluster_features: ["cluster_health_indices_options"] + cluster_features: ["gte_v7.2.0"] reason: "indices options has been introduced in cluster health request starting version 7.2.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml index 023e35cd4bee1..5c76703c3a410 100644 ---
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml @@ -1,7 +1,7 @@ --- setup: - requires: - cluster_features: ["cluster_info_extended"] + cluster_features: ["gte_v8.9.0"] reason: "/_info/_all only available from v8.9" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml index ff7d982b14fee..c4fb05a8c899f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - requires: - cluster_features: ["cluster_info_extended"] + cluster_features: ["gte_v8.9.0"] reason: "/_info/http only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml index 261f1d8ea56cb..02a5666639543 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - requires: - cluster_features: ["cluster_info_extended"] + cluster_features: ["gte_v8.9.0"] reason: "/_info/thread_pool only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml index bb7597537014e..bf6662d479748 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - requires: - cluster_features: ["cluster_info_extended"] + cluster_features: ["gte_v8.9.0"] reason: "/_info/script only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml index fda715e416ac2..508971c7c4775 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml @@ -2,7 +2,7 @@ "Prevalidation basic test": - requires: test_runner_features: ["contains"] - cluster_features: ["cluster_prevalidate_node_removal_reason"] + cluster_features: ["gte_v8.7.0"] reason: "The reason field was introduced in 8.7.0" # Fetch a node ID and stash it in node_id @@ -20,7 +20,7 @@ --- "Prevalidation with no node specified": - requires: - cluster_features: ["cluster_prevalidate_node_removal"] + cluster_features: ["gte_v8.6.0"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -32,7 +32,7 @@ --- "Prevalidation with more than one query parameter": - requires: - cluster_features: ["cluster_prevalidate_node_removal"] + cluster_features: ["gte_v8.6.0"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -46,7 +46,7 @@ --- "Prevalidation with non-existing node": - requires: - 
cluster_features: ["cluster_prevalidate_node_removal"] + cluster_features: ["gte_v8.6.0"] reason: "API added in 8.6.0" - do: catch: missing diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml index 2c48282332909..b38a03d53f89f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml @@ -76,7 +76,7 @@ "get cluster stats returns packaging types": - requires: - cluster_features: ["cluster_stats_packaging_types"] + cluster_features: ["gte_v7.2.0"] reason: "packaging types are added for v7.2.0" - do: @@ -86,7 +86,7 @@ --- "get cluster stats without runtime fields": - requires: - cluster_features: ["cluster_stats_runtime_fields"] + cluster_features: ["gte_v7.13.0"] reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -99,7 +99,7 @@ --- "Usage stats with script-less runtime fields": - requires: - cluster_features: ["cluster_stats_runtime_fields"] + cluster_features: ["gte_v7.13.0"] reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -169,7 +169,7 @@ --- "mappings sizes reported in get cluster stats": - requires: - cluster_features: ["cluster_stats_mapping_sizes"] + cluster_features: ["gte_v8.4.0"] reason: "mapping sizes reported from 8.4 onwards" - do: indices.create: @@ -189,7 +189,7 @@ --- "snapshot stats reported in get cluster stats": - requires: - cluster_features: ["cluster_stats_snapshots"] + cluster_features: ["gte_v8.8.0"] reason: "snapshot stats reported from 8.8 onwards" - do: @@ -232,7 +232,7 @@ --- "Dense vector stats": - requires: - cluster_features: ["cluster_stats_dense_vectors"] + cluster_features: ["gte_v8.10.0"] reason: "dense vector stats added in 8.10" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml index 648964d9e721f..d3b7a68be9bb4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml @@ -1,7 +1,7 @@ --- "Indexing pressure cluster stats": - requires: - cluster_features: ["cluster_stats_indexing_pressure"] + cluster_features: ["gte_v8.1.0"] reason: "indexing_pressure in cluster was added in 8.1" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml index 20ffc0c89e5af..8242b7cdd29e7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -122,8 +122,8 @@ --- "Create index without soft deletes": - - skip: - version: " - 7.99.99" + - requires: + cluster_features: ["gte_v8.0.0"] reason: "indices without soft-deletes is no longer supported " - do: catch: /illegal_argument_exception/ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml index 
577665486da6e..dc3361fefab6e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml @@ -1,6 +1,6 @@ setup: - skip: - cluster_features: [ "templates_v2" ] + cluster_features: [ "gte_v7.8.0" ] reason: "index template v2 API unavailable before 7.8" features: allowed_warnings @@ -93,7 +93,7 @@ setup: --- "Add data stream lifecycle": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" features: allowed_warnings @@ -130,7 +130,7 @@ setup: --- "Get data stream lifecycle with default rollover": - skip: - cluster_features: ["datastream_lifecycle", "data_stream.lifecycle.global_retention"] + cluster_features: ["data_stream.lifecycle.global_retention"] reason: "Data stream lifecycle with effective retention was released in 8.14" features: allowed_warnings @@ -161,7 +161,7 @@ setup: --- "Reject data stream lifecycle without data stream configuration": - skip: - cluster_features: ["datastream_lifecycle"] + cluster_features: ["gte_v8.11.0"] reason: "Data stream lifecycle in index templates was updated after 8.10" - do: catch: bad_request diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index ef121411d8351..75d282d524607 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -64,9 +64,9 @@ --- "Put mappings with explicit _doc type": - - skip: - version: " - 7.99.99" - reason: "deprecation message changed in 8.0" + - requires: + cluster_features: ["gte_v8.0.0"] + reason: "deprecation message changed in 8.0" - do: indices.create: index: test_index @@ -87,9 +87,10 @@ --- "Put mappings with explicit _doc type bwc": - skip: - version: "8.0.0 - " + cluster_features: [ "gte_v8.0.0"] reason: "old deprecation message for pre 8.0" - features: "node_selector" + - requires: + test_runner_features: ["node_selector"] - do: indices.create: index: test_index @@ -112,8 +113,8 @@ --- "Update per-field metadata": - - skip: - version: " - 7.5.99" + - requires: + cluster_features: ["gte_v7.6.0"] reason: "Per-field meta was introduced in 7.6" - do: @@ -146,8 +147,8 @@ --- "disabling synthetic source fails": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: ["gte_v8.4.0"] reason: "Added in 8.4.0" - do: @@ -168,8 +169,8 @@ --- "enabling synthetic source from explicit succeeds": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: [ "gte_v8.4.0" ] reason: "Added in 8.4.0" - do: @@ -189,8 +190,8 @@ --- "enabling synthetic source succeeds": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: [ "gte_v8.4.0" ] reason: "Added in 8.4.0" - do: @@ -215,8 +216,8 @@ --- "enabling synthetic source when no mapping succeeds": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: [ "gte_v8.4.0" ] reason: "Added in 8.4.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml 
index 0c4f90ea73343..a0c66bc568fa9 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.update_aliases/40_must_exist.yml @@ -84,8 +84,8 @@ must_exist: true --- "Partial success with must_exist == false": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "alias action results do not work until 8.14" - do: indices.create: @@ -110,8 +110,8 @@ - match: { action_results.1.error.type: aliases_not_found_exception } --- "Partial success with must_exist == null (default)": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "alias action results do not work until 8.14" - do: indices.create: @@ -135,8 +135,8 @@ - match: { action_results.1.error.type: aliases_not_found_exception } --- "No action_results field if all actions successful": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "alias action results do not work until 8.14" - do: indices.create: @@ -152,8 +152,8 @@ - match: { action_results: null } --- "Single result per input action": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "alias action results do not work until 8.14" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml index ae21e68b160f5..b2efe16add78f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml @@ -1,7 +1,7 @@ --- "Noop": - - skip: - version: " - 7.3.99" + - requires: + cluster_features: [ "gte_v7.4.0" ] reason: "Noop does not return seq_no and primary_term until 7.4" - do: index: diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index a32679d445629..dfecc0bdeb3c7 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.test.ClasspathUtils; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; @@ -195,11 +194,6 @@ public void initAndResetContext() throws Exception { restTestExecutionContext.clear(); } - @Override - protected List<FeatureSpecification> createAdditionalFeatureSpecifications() { - return List.of(new YamlTestLegacyFeatures()); - } - /** * Create the test execution context. Can be overwritten in sub-implementations of the test if the context needs to be modified. */ diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java deleted file mode 100644 index 2ce35888c3f14..0000000000000 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.test.rest.yaml; - -import org.elasticsearch.Version; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -/** - * This class groups historical features that have been removed from the production codebase, but are still used by YAML tests - * to support BwC. Rather than leaving them in the main src we group them here, so it's clear they are not used in production code anymore. - */ -public class YamlTestLegacyFeatures implements FeatureSpecification { - private static final NodeFeature BULK_AUTO_ID = new NodeFeature("bulk_auto_id"); - private static final NodeFeature BULK_REQUIRE_ALIAS = new NodeFeature("bulk_require_alias"); - private static final NodeFeature BULK_DYNAMIC_TEMPLATE_OP_TYPE = new NodeFeature("bulk_dynamic_template_op_type"); - private static final NodeFeature BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION = new NodeFeature( - "bulk_dynamic_template_document_parse_exception" - ); - - private static final NodeFeature BULK_PIPELINE_VALIDATE = new NodeFeature("bulk_pipeline_validate"); - - private static final NodeFeature CAT_ALIASES_SHOW_WRITE_INDEX = new NodeFeature("cat_aliases_show_write_index"); - private static final NodeFeature CAT_ALIASES_HIDDEN = new NodeFeature("cat_aliases_hidden"); - private static final NodeFeature CAT_ALIASES_LOCAL_DEPRECATED = new NodeFeature("cat_aliases_local_deprecated"); - - private static final NodeFeature CAT_ALLOCATION_NODE_ROLE = new NodeFeature("cat_allocation_node_role"); - - private static final NodeFeature CAT_INDICES_DATASET_SIZE = new NodeFeature("cat_indices_dataset_size"); - private static final NodeFeature CAT_INDICES_VALIDATE_HEALTH_PARAM = new NodeFeature("cat_indices_validate_health_param"); - - private static final NodeFeature CAT_PLUGINS_NEW_FORMAT = new NodeFeature("cat_plugins_new_format"); - - private static final NodeFeature CAT_RECOVERY_NEW_BYTES_FORMAT = new NodeFeature("cat_recovery_new_bytes_format"); - - private static final NodeFeature CAT_SHARDS_DATASET_SIZE = new NodeFeature("cat_shards_dataset_size"); - private static final NodeFeature CAT_SHARDS_FIX_HIDDEN_INDICES = new NodeFeature("cat_shards_fix_hidden_indices"); - - private static final NodeFeature CAT_TASKS_X_OPAQUE_ID = new NodeFeature("cat_tasks_x_opaque_id"); - - private static final NodeFeature CAT_TEMPLATE_NAME_VALIDATION = new NodeFeature("cat_template_name_validation"); - - private static final NodeFeature CLUSTER_TEMPLATES_DELETE_MULTIPLE = new NodeFeature("cluster_templates_delete_multiple"); - - private static final NodeFeature CLUSTER_ALLOCATION_ROLE = new NodeFeature("cluster_allocation_role"); - - private static final NodeFeature CLUSTER_DESIRED_BALANCE = new NodeFeature("cluster_desired_balance"); - private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS = new NodeFeature("cluster_desired_balance_stats"); - private static final NodeFeature CLUSTER_DESIRED_BALANCE_EXTENDED = new NodeFeature("cluster_desired_balance_extended"); - private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT = new NodeFeature( - "cluster_desired_balance_stats_undesired_count" - ); - - private static final NodeFeature CLUSTER_DESIRED_NODES_OLD = new NodeFeature("cluster_desired_nodes_old"); - private static final NodeFeature CLUSTER_DESIRED_NODES_DRY_RUN = new NodeFeature("cluster_desired_nodes_dry_run"); - private static final NodeFeature CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION = new NodeFeature( - "cluster_desired_nodes_no_settings_validation" - ); - private static final NodeFeature CLUSTER_DESIRED_NODES = new NodeFeature("cluster_desired_nodes"); - - private static final NodeFeature CLUSTER_HEALTH_INDICES_OPTIONS = new NodeFeature("cluster_health_indices_options"); - - private static final NodeFeature CLUSTER_INFO = new NodeFeature("cluster_info"); - private static final NodeFeature CLUSTER_INFO_EXTENDED = new NodeFeature("cluster_info_extended"); - - private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL = new NodeFeature("cluster_prevalidate_node_removal"); - private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON = new NodeFeature("cluster_prevalidate_node_removal_reason"); - - private static final NodeFeature CLUSTER_STATS_PACKAGING_TYPES = new NodeFeature("cluster_stats_packaging_types"); - private static final NodeFeature CLUSTER_STATS_RUNTIME_FIELDS = new NodeFeature("cluster_stats_runtime_fields"); - private static final NodeFeature CLUSTER_STATS_INDEXING_PRESSURE = new NodeFeature("cluster_stats_indexing_pressure"); - private static final NodeFeature CLUSTER_STATS_MAPPING_SIZES = new NodeFeature("cluster_stats_mapping_sizes"); - private static final NodeFeature CLUSTER_STATS_SNAPSHOTS = new NodeFeature("cluster_stats_snapshots"); - private static final NodeFeature CLUSTER_STATS_DENSE_VECTORS = new NodeFeature("cluster_stats_dense_vectors"); - - private static final NodeFeature DATASTREAM_LIFECYCLE = new NodeFeature("datastream_lifecycle"); - - private static final NodeFeature TEMPLATES_V2 = new NodeFeature("templates_v2"); - - private static final NodeFeature INDICES_REPLICATE_CLOSED = new NodeFeature("indices_replicate_closed"); - - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.ofEntries( - Map.entry(BULK_AUTO_ID, Version.V_7_5_0), - Map.entry(BULK_REQUIRE_ALIAS, Version.V_7_10_0), - Map.entry(BULK_PIPELINE_VALIDATE, Version.V_7_9_1), - Map.entry(BULK_DYNAMIC_TEMPLATE_OP_TYPE, Version.V_8_6_1), - Map.entry(BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION, Version.V_8_8_0), - - Map.entry(CAT_ALIASES_SHOW_WRITE_INDEX, Version.V_7_4_0), - Map.entry(CAT_ALIASES_HIDDEN, Version.V_7_7_0), - Map.entry(CAT_ALIASES_LOCAL_DEPRECATED, Version.V_8_12_0), - - Map.entry(CAT_ALLOCATION_NODE_ROLE, Version.V_8_10_0), - - Map.entry(CAT_INDICES_VALIDATE_HEALTH_PARAM, Version.V_7_8_0), - Map.entry(CAT_INDICES_DATASET_SIZE, Version.V_8_11_0), - - Map.entry(CAT_PLUGINS_NEW_FORMAT, Version.V_7_12_0), - - Map.entry(CAT_RECOVERY_NEW_BYTES_FORMAT, Version.V_8_0_0), - - Map.entry(CAT_SHARDS_FIX_HIDDEN_INDICES, Version.V_8_3_0), - Map.entry(CAT_SHARDS_DATASET_SIZE, Version.V_8_11_0), - - Map.entry(CAT_TASKS_X_OPAQUE_ID, Version.V_7_10_0), - - Map.entry(CAT_TEMPLATE_NAME_VALIDATION, Version.V_7_16_0), -
Map.entry(CLUSTER_TEMPLATES_DELETE_MULTIPLE, Version.V_8_0_0), - Map.entry(CLUSTER_ALLOCATION_ROLE, Version.V_8_11_0), - - Map.entry(CLUSTER_DESIRED_BALANCE, Version.V_8_6_0), - Map.entry(CLUSTER_DESIRED_BALANCE_STATS, Version.V_8_7_0), - Map.entry(CLUSTER_DESIRED_BALANCE_EXTENDED, Version.V_8_8_0), - Map.entry(CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT, Version.V_8_12_0), - - Map.entry(CLUSTER_DESIRED_NODES_OLD, Version.V_8_3_0), - Map.entry(CLUSTER_DESIRED_NODES_DRY_RUN, Version.V_8_4_0), - Map.entry(CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION, Version.V_8_10_0), - - Map.entry(CLUSTER_HEALTH_INDICES_OPTIONS, Version.V_7_2_0), - - Map.entry(CLUSTER_INFO, Version.V_8_8_0), - Map.entry(CLUSTER_INFO_EXTENDED, Version.V_8_9_0), - - Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL, Version.V_8_6_0), - Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON, Version.V_8_7_0), - - Map.entry(CLUSTER_STATS_PACKAGING_TYPES, Version.V_7_2_0), - Map.entry(CLUSTER_STATS_RUNTIME_FIELDS, Version.V_7_13_0), - Map.entry(CLUSTER_STATS_INDEXING_PRESSURE, Version.V_8_1_0), - Map.entry(CLUSTER_STATS_MAPPING_SIZES, Version.V_8_4_0), - Map.entry(CLUSTER_STATS_SNAPSHOTS, Version.V_8_8_0), - Map.entry(CLUSTER_STATS_DENSE_VECTORS, Version.V_8_10_0), - - Map.entry(DATASTREAM_LIFECYCLE, Version.V_8_11_0), - - Map.entry(INDICES_REPLICATE_CLOSED, Version.V_7_2_0), - - Map.entry(TEMPLATES_V2, Version.V_7_8_0) - ); - } -} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/50_data_stream.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/50_data_stream.yml index d7c4cf91002c2..bc47a41ab4eb5 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/50_data_stream.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/50_data_stream.yml @@ -56,8 +56,8 @@ teardown: --- "enrich documents over _bulk via a data stream": - - skip: - version: " - 8.13.99" + - requires: + cluster_features: [ "gte_v8.14.0" ] reason: "enrich didn't support data streams until 8.14.0+" - do: From 0a7b723a6695fa1a6536fdd40312efe9ffb230a1 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Mon, 15 Apr 2024 08:13:04 -0400 Subject: [PATCH 010/130] Add Index Metadata Map To Query Rewrite Context (#107075) Add resolved indices info to QueryRewriteContext, which will be used by the upcoming semantic query. 
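For illustration, a query builder could consume this new context state during rewrite roughly as follows. This is a minimal sketch, not part of the patch: the getResolvedIndices() accessor and the ResolvedIndices#getConcreteLocalIndicesMetadata() method are assumptions inferred from this change's description, and the null check reflects that resolved indices may not be populated at every rewrite stage.

    // Hypothetical doRewrite() in an AbstractQueryBuilder subclass (sketch only).
    @Override
    protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
        ResolvedIndices resolvedIndices = queryRewriteContext.getResolvedIndices();
        if (resolvedIndices == null) {
            return this; // e.g. a rewrite stage without resolved-index information
        }
        // Inspect the metadata of each concrete local index the request resolved to,
        // e.g. to choose a per-index inference configuration for a semantic query.
        for (IndexMetadata indexMetadata : resolvedIndices.getConcreteLocalIndicesMetadata().values()) {
            // ... read mappings or settings from indexMetadata here ...
        }
        return this;
    }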
--- .../search/basic/QueryRewriteContextIT.java | 268 ++++++++++++++++++ .../elasticsearch/action/ResolvedIndices.java | 238 ++++++++++++++++ .../query/TransportValidateQueryAction.java | 16 +- .../explain/TransportExplainAction.java | 16 +- .../action/search/TransportSearchAction.java | 158 +++++------ .../search/TransportSearchShardsAction.java | 30 +- .../org/elasticsearch/index/IndexService.java | 3 +- .../query/CoordinatorRewriteContext.java | 1 + .../index/query/QueryRewriteContext.java | 35 ++- .../index/query/SearchExecutionContext.java | 3 +- .../elasticsearch/indices/IndicesService.java | 5 +- .../elasticsearch/search/SearchService.java | 5 +- .../search/TransportSearchActionTests.java | 52 ++-- .../action/MockResolvedIndices.java | 34 +++ .../test/AbstractBuilderTestCase.java | 26 +- .../ml/integration/TextEmbeddingQueryIT.java | 4 +- .../ml/integration/TextExpansionQueryIT.java | 2 + 17 files changed, 765 insertions(+), 131 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/basic/QueryRewriteContextIT.java create mode 100644 server/src/main/java/org/elasticsearch/action/ResolvedIndices.java create mode 100644 test/framework/src/main/java/org/elasticsearch/action/MockResolvedIndices.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/QueryRewriteContextIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/QueryRewriteContextIT.java new file mode 100644 index 0000000000000..8197b5b8bdd48 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/QueryRewriteContextIT.java @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.basic; + +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; +import org.elasticsearch.action.explain.ExplainRequestBuilder; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.ClosePointInTimeResponse; +import org.elasticsearch.action.search.OpenPointInTimeRequest; +import org.elasticsearch.action.search.OpenPointInTimeResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.TransportClosePointInTimeAction; +import org.elasticsearch.action.search.TransportOpenPointInTimeAction; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class QueryRewriteContextIT extends ESIntegTestCase { + private static class TestQueryBuilder extends AbstractQueryBuilder { + private static final String NAME = "test"; + + private static TestQueryBuilder fromXContent(XContentParser parser) { + return new TestQueryBuilder(); + } + + TestQueryBuilder() {} + + TestQueryBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + 
builder.endObject(); + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + return new MatchNoDocsQuery(); + } + + @Override + protected boolean doEquals(TestQueryBuilder other) { + return true; + } + + @Override + protected int doHashCode() { + return 0; + } + } + + public static class TestPlugin extends Plugin implements SearchPlugin { + public TestPlugin() {} + + @Override + public List> getQueries() { + return List.of(new QuerySpec(TestQueryBuilder.NAME, TestQueryBuilder::new, TestQueryBuilder::fromXContent)); + } + } + + @Override + protected Collection> nodePlugins() { + return List.of(TestPlugin.class); + } + + public void testResolvedIndices_TransportSearchAction() { + final String[] indices = { "test1", "test2" }; + createIndex(indices); + + assertAcked(indicesAdmin().prepareAliases().addAlias(indices, "alias")); + assertResolvedIndices(prepareSearch(indices), Set.of(indices), Set.of(indices), r -> {}); + assertResolvedIndices(prepareSearch("test*"), Set.of("test*"), Set.of(indices), r -> {}); + assertResolvedIndices(prepareSearch("alias"), Set.of("alias"), Set.of(indices), r -> {}); + + final String pointInTimeId = openPointInTime(indices, TimeValue.timeValueMinutes(2)); + try { + final PointInTimeBuilder pointInTimeBuilder = new PointInTimeBuilder(pointInTimeId); + assertResolvedIndices(prepareSearch().setPointInTime(pointInTimeBuilder), Set.of(indices), Set.of(indices), r -> {}); + + assertAcked(indicesAdmin().prepareDelete("test2")); + assertResolvedIndices(prepareSearch().setPointInTime(pointInTimeBuilder), Set.of(indices), Set.of("test1"), r -> {}); + } finally { + closePointInTime(pointInTimeId); + } + + } + + public void testResolvedIndices_TransportExplainAction() { + final String[] indices = { "test1", "test2" }; + createIndex(indices); + assertAcked(indicesAdmin().prepareAliases().addAlias("test1", "alias1")); + assertAcked(indicesAdmin().prepareAliases().addAlias(indices, "alias2")); + + assertResolvedIndices(client().prepareExplain("test1", "1"), Set.of("test1"), Set.of("test1"), r -> {}); + assertResolvedIndices(client().prepareExplain("alias1", "1"), Set.of("alias1"), Set.of("test1"), r -> {}); + assertRequestBuilderThrows(client().prepareExplain("alias2", "1"), IllegalArgumentException.class, RestStatus.BAD_REQUEST); + } + + public void testResolvedIndices_TransportValidateQueryAction() { + final String[] indices = { "test1", "test2" }; + createIndex(indices); + assertAcked(indicesAdmin().prepareAliases().addAlias(indices, "alias")); + + Consumer responseAssertions = r -> { + assertThat(r.getStatus(), equalTo(RestStatus.OK)); + assertThat(r.isValid(), is(true)); + }; + + assertResolvedIndices( + client().admin().indices().prepareValidateQuery(indices), + Set.of(indices), + Set.of(indices), + responseAssertions + ); + assertResolvedIndices( + client().admin().indices().prepareValidateQuery("test*"), + Set.of("test*"), + Set.of(indices), + responseAssertions + ); + assertResolvedIndices( + client().admin().indices().prepareValidateQuery("alias"), + Set.of("alias"), + Set.of(indices), + responseAssertions + ); + } + + private String openPointInTime(String[] indices, TimeValue keepAlive) { + OpenPointInTimeRequest request = new OpenPointInTimeRequest(indices).keepAlive(keepAlive); + OpenPointInTimeResponse response = client().execute(TransportOpenPointInTimeAction.TYPE, request).actionGet(); + return response.getPointInTimeId(); + } + + private void closePointInTime(String pointInTimeId) { + ClosePointInTimeResponse 
response = client().execute( + TransportClosePointInTimeAction.TYPE, + new ClosePointInTimeRequest(pointInTimeId) + ).actionGet(); + assertThat(response.status(), is(RestStatus.OK)); + } + + private static void assertResolvedIndices( + ActionRequestBuilder requestBuilder, + @Nullable Set expectedLocalIndices, + Set expectedConcreteLocalIndices, + Consumer responseAssertions + ) { + AtomicBoolean gotQueryRewriteContext = new AtomicBoolean(false); + TestQueryBuilder testQueryBuilder = new TestQueryBuilder() { + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + // Check that the first QueryRewriteContext received has the expected resolved indices. + // Later QueryRewriteContext instances received, such as the one generated in the can-match phase, will have resolved + // indices set to null. + if (queryRewriteContext.getClass() == QueryRewriteContext.class && gotQueryRewriteContext.getAndSet(true) == false) { + ResolvedIndices resolvedIndices = queryRewriteContext.getResolvedIndices(); + assertThat(resolvedIndices, notNullValue()); + + OriginalIndices localIndices = resolvedIndices.getLocalIndices(); + if (expectedLocalIndices != null) { + assertThat(localIndices, notNullValue()); + assertThat(Set.of(localIndices.indices()), equalTo(expectedLocalIndices)); + } else { + assertThat(localIndices, nullValue()); + } + + assertThat( + Arrays.stream(resolvedIndices.getConcreteLocalIndices()).map(Index::getName).collect(Collectors.toSet()), + equalTo(expectedConcreteLocalIndices) + ); + + Map indexMetadataMap = resolvedIndices.getConcreteLocalIndicesMetadata(); + assertThat(indexMetadataMap.size(), equalTo(expectedConcreteLocalIndices.size())); + indexMetadataMap.forEach((k, v) -> { + assertThat(expectedConcreteLocalIndices.contains(k.getName()), is(true)); + assertThat(v, notNullValue()); + }); + } + + return super.doRewrite(queryRewriteContext); + } + }; + + setQuery(requestBuilder, testQueryBuilder); + assertResponse(requestBuilder, responseAssertions); + assertThat(gotQueryRewriteContext.get(), is(true)); + } + + private static void setQuery( + ActionRequestBuilder requestBuilder, + QueryBuilder queryBuilder + ) { + if (requestBuilder instanceof SearchRequestBuilder searchRequestBuilder) { + searchRequestBuilder.setQuery(queryBuilder); + } else if (requestBuilder instanceof ExplainRequestBuilder explainRequestBuilder) { + explainRequestBuilder.setQuery(queryBuilder); + } else if (requestBuilder instanceof ValidateQueryRequestBuilder validateQueryRequestBuilder) { + validateQueryRequestBuilder.setQuery(queryBuilder); + } else { + throw new AssertionError("Unexpected request builder type [" + requestBuilder.getClass() + "]"); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java b/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java new file mode 100644 index 0000000000000..2ff0b476dc60b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/ResolvedIndices.java @@ -0,0 +1,238 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action; + +import org.elasticsearch.action.search.SearchContextId; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.RemoteClusterService; + +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class ResolvedIndices { + @Nullable + private final SearchContextId searchContextId; + private final Map remoteClusterIndices; + @Nullable + private final OriginalIndices localIndices; + private final Map localIndexMetadata; + + ResolvedIndices( + Map remoteClusterIndices, + @Nullable OriginalIndices localIndices, + Map localIndexMetadata, + @Nullable SearchContextId searchContextId + ) { + this.remoteClusterIndices = Collections.unmodifiableMap(remoteClusterIndices); + this.localIndices = localIndices; + this.localIndexMetadata = Collections.unmodifiableMap(localIndexMetadata); + this.searchContextId = searchContextId; + } + + ResolvedIndices( + Map remoteClusterIndices, + @Nullable OriginalIndices localIndices, + Map localIndexMetadata + ) { + this(remoteClusterIndices, localIndices, localIndexMetadata, null); + } + + /** + * Get the remote cluster indices, structured as a map where the key is the remote cluster alias. + *
+     * <p>
+     * NOTE: The returned indices are <b>not</b> guaranteed to be concrete indices that exist.
+     * In addition to simple concrete index names, returned index names can be any combination of the following:
+     * <ul>
+     *     <li>Aliases</li>
+     *     <li>Wildcards</li>
+     *     <li>Invalid index/alias names</li>
+     * </ul>
+     *
+ * + * @return The remote cluster indices map + */ + public Map getRemoteClusterIndices() { + return remoteClusterIndices; + } + + /** + * Get the local cluster indices. + * If the returned value is null, no local cluster indices are referenced. + * If the returned value is an {@link OriginalIndices} instance with an empty or null {@link OriginalIndices#indices()} array, + * potentially all local cluster indices are referenced, depending on if {@link OriginalIndices#indicesOptions()} is configured to + * expand wildcards. + *
+     * <p>
+     * NOTE: The returned indices are <b>not</b> guaranteed to be concrete indices that exist.
+     * In addition to simple concrete index names, returned index names can be any combination of the following:
+     * <ul>
+     *     <li>Aliases</li>
+     *     <li>Wildcards</li>
+     *     <li>Invalid index/alias names</li>
+     * </ul>
+     *
+ * + * @return The local cluster indices + */ + @Nullable + public OriginalIndices getLocalIndices() { + return localIndices; + } + + /** + * Get metadata for concrete local cluster indices. + * All indices returned are guaranteed to be concrete indices that exist. + * + * @return Metadata for concrete local cluster indices + */ + public Map getConcreteLocalIndicesMetadata() { + return localIndexMetadata; + } + + /** + * Get the concrete local cluster indices. + * All indices returned are guaranteed to be concrete indices that exist. + * + * @return The concrete local cluster indices + */ + public Index[] getConcreteLocalIndices() { + return localIndexMetadata.keySet().toArray(Index[]::new); + } + + /** + * Get the search context ID. + * Returns a non-null value only when the instance is created using + * {@link ResolvedIndices#resolveWithPIT(PointInTimeBuilder, IndicesOptions, ClusterState, NamedWriteableRegistry)}. + * + * @return The search context ID + */ + @Nullable + public SearchContextId getSearchContextId() { + return searchContextId; + } + + /** + * Create a new {@link ResolvedIndices} instance from an {@link IndicesRequest}. + * + * @param request The indices request + * @param clusterState The cluster state + * @param indexNameExpressionResolver The index name expression resolver used to resolve concrete local indices + * @param remoteClusterService The remote cluster service used to group remote cluster indices + * @param startTimeInMillis The request start time in milliseconds + * @return a new {@link ResolvedIndices} instance + */ + public static ResolvedIndices resolveWithIndicesRequest( + IndicesRequest request, + ClusterState clusterState, + IndexNameExpressionResolver indexNameExpressionResolver, + RemoteClusterService remoteClusterService, + long startTimeInMillis + ) { + final Map remoteClusterIndices = remoteClusterService.groupIndices( + request.indicesOptions(), + request.indices() + ); + final OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + + Index[] concreteLocalIndices = localIndices == null + ? Index.EMPTY_ARRAY + : indexNameExpressionResolver.concreteIndices(clusterState, localIndices, startTimeInMillis); + + return new ResolvedIndices(remoteClusterIndices, localIndices, resolveLocalIndexMetadata(concreteLocalIndices, clusterState, true)); + } + + /** + * Create a new {@link ResolvedIndices} instance from a {@link PointInTimeBuilder}. 
+ * + * @param pit The point-in-time builder + * @param indicesOptions The indices options to propagate to the new {@link ResolvedIndices} instance + * @param clusterState The cluster state + * @param namedWriteableRegistry The named writeable registry used to decode the search context ID + * @return a new {@link ResolvedIndices} instance + */ + public static ResolvedIndices resolveWithPIT( + PointInTimeBuilder pit, + IndicesOptions indicesOptions, + ClusterState clusterState, + NamedWriteableRegistry namedWriteableRegistry + ) { + final SearchContextId searchContextId = pit.getSearchContextId(namedWriteableRegistry); + final Map> indicesFromSearchContext = new HashMap<>(); + for (var entry : searchContextId.shards().entrySet()) { + String clusterAlias = entry.getValue().getClusterAlias(); + if (clusterAlias == null) { + clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + } + + indicesFromSearchContext.computeIfAbsent(clusterAlias, s -> new HashSet<>()).add(entry.getKey().getIndex()); + } + + OriginalIndices localIndices; + Index[] concreteLocalIndices; + Set localIndicesSet = indicesFromSearchContext.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + if (localIndicesSet != null) { + concreteLocalIndices = localIndicesSet.toArray(Index[]::new); + localIndices = new OriginalIndices(localIndicesSet.stream().map(Index::getName).toArray(String[]::new), indicesOptions); + } else { + concreteLocalIndices = Index.EMPTY_ARRAY; + // Set localIndices to null because a non-null value with a null or 0-length indices array will be resolved to all indices by + // IndexNameExpressionResolver + localIndices = null; + } + + Map remoteClusterIndices = new HashMap<>(); + for (var entry : indicesFromSearchContext.entrySet()) { + OriginalIndices originalIndices = new OriginalIndices( + entry.getValue().stream().map(Index::getName).toArray(String[]::new), + indicesOptions + ); + remoteClusterIndices.put(entry.getKey(), originalIndices); + } + + // Don't fail on missing indices to handle point-in-time requests that reference deleted indices + return new ResolvedIndices( + remoteClusterIndices, + localIndices, + resolveLocalIndexMetadata(concreteLocalIndices, clusterState, false), + searchContextId + ); + } + + private static Map resolveLocalIndexMetadata( + Index[] concreteLocalIndices, + ClusterState clusterState, + boolean failOnMissingIndex + ) { + Map localIndexMetadata = new HashMap<>(); + for (Index index : concreteLocalIndices) { + IndexMetadata indexMetadata = clusterState.metadata().index(index); + if (indexMetadata == null) { + if (failOnMissingIndex) { + throw new IndexNotFoundException(index); + } + continue; + } + + localIndexMetadata.put(index, indexMetadata); + } + + return localIndexMetadata; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index d4832fa0d14e1..1c3d168b8889d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -11,6 +11,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; @@ -38,6 +39,7 @@ import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -55,6 +57,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction< ShardValidateQueryResponse> { private final SearchService searchService; + private final RemoteClusterService remoteClusterService; @Inject public TransportValidateQueryAction( @@ -75,12 +78,23 @@ public TransportValidateQueryAction( transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); this.searchService = searchService; + this.remoteClusterService = transportService.getRemoteClusterService(); } @Override protected void doExecute(Task task, ValidateQueryRequest request, ActionListener listener) { request.nowInMillis = System.currentTimeMillis(); LongSupplier timeProvider = () -> request.nowInMillis; + + // Indices are resolved twice (they are resolved again later by the base class), but that's ok for this action type + ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndicesRequest( + request, + clusterService.state(), + indexNameExpressionResolver, + remoteClusterService, + request.nowInMillis + ); + ActionListener rewriteListener = ActionListener.wrap(rewrittenQuery -> { request.query(rewrittenQuery); super.doExecute(task, request, listener); @@ -107,7 +121,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener if (request.query() == null) { rewriteListener.onResponse(request.query()); } else { - Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener); + Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider, resolvedIndices), rewriteListener); } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index d2d7a945520c1..bece9922f3e46 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; @@ -36,6 +37,7 @@ import org.elasticsearch.search.rescore.Rescorer; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -51,6 +53,7 @@ public class TransportExplainAction extends TransportSingleShardAction TYPE = new ActionType<>("indices:data/read/explain"); private final SearchService searchService; + private final RemoteClusterService remoteClusterService; @Inject public TransportExplainAction( @@ -72,11 +75,22 @@ public TransportExplainAction( threadPool.executor(ThreadPool.Names.GET) ); 
this.searchService = searchService; + this.remoteClusterService = transportService.getRemoteClusterService(); } @Override protected void doExecute(Task task, ExplainRequest request, ActionListener listener) { request.nowInMillis = System.currentTimeMillis(); + + // Indices are resolved twice (they are resolved again later by the base class), but that's ok for this action type + ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndicesRequest( + request, + clusterService.state(), + indexNameExpressionResolver, + remoteClusterService, + request.nowInMillis + ); + ActionListener rewriteListener = listener.delegateFailureAndWrap((l, rewrittenQuery) -> { request.query(rewrittenQuery); super.doExecute(task, request, l); @@ -84,7 +98,7 @@ protected void doExecute(Task task, ExplainRequest request, ActionListener request.nowInMillis; - Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener); + Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider, resolvedIndices), rewriteListener); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 2255681e275b9..3abb74ddd8ec5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; @@ -50,7 +51,6 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -101,7 +101,6 @@ import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.stream.Collectors; import java.util.stream.StreamSupport; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; @@ -322,32 +321,42 @@ void executeRequest( relativeStartNanos, System::nanoTime ); + + final ClusterState clusterState = clusterService.state(); + clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); + + final ResolvedIndices resolvedIndices; + if (original.pointInTimeBuilder() != null) { + resolvedIndices = ResolvedIndices.resolveWithPIT( + original.pointInTimeBuilder(), + original.indicesOptions(), + clusterState, + namedWriteableRegistry + ); + } else { + resolvedIndices = ResolvedIndices.resolveWithIndicesRequest( + original, + clusterState, + indexNameExpressionResolver, + remoteClusterService, + timeProvider.absoluteStartMillis() + ); + frozenIndexCheck(resolvedIndices); + } + ActionListener rewriteListener = listener.delegateFailureAndWrap((delegate, rewritten) -> { - final SearchContextId searchContext; - // key to map is clusterAlias - final Map remoteClusterIndices; if (ccsCheckCompatibility) { checkCCSVersionCompatibility(rewritten); 
} - if (rewritten.pointInTimeBuilder() != null) { - searchContext = rewritten.pointInTimeBuilder().getSearchContextId(namedWriteableRegistry); - remoteClusterIndices = getIndicesFromSearchContexts(searchContext, rewritten.indicesOptions()); - } else { - searchContext = null; - remoteClusterIndices = remoteClusterService.groupIndices(rewritten.indicesOptions(), rewritten.indices()); - } - OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - final ClusterState clusterState = clusterService.state(); - if (remoteClusterIndices.isEmpty()) { + if (resolvedIndices.getRemoteClusterIndices().isEmpty()) { executeLocalSearch( task, timeProvider, rewritten, - localIndices, + resolvedIndices, clusterState, SearchResponse.Clusters.EMPTY, - searchContext, searchPhaseProvider.apply(delegate) ); } else { @@ -358,12 +367,12 @@ void executeRequest( ? searchService.aggReduceContextBuilder(task::isCancelled, rewritten.source().aggregations()) : null; SearchResponse.Clusters clusters = new SearchResponse.Clusters( - localIndices, - remoteClusterIndices, + resolvedIndices.getLocalIndices(), + resolvedIndices.getRemoteClusterIndices(), true, remoteClusterService::isSkipUnavailable ); - if (localIndices == null) { + if (resolvedIndices.getLocalIndices() == null) { // Notify the progress listener that a CCS with minimize_roundtrips is happening remote-only (no local shards) task.getProgressListener() .notifyListShards(Collections.emptyList(), Collections.emptyList(), clusters, false, timeProvider); @@ -372,8 +381,7 @@ void executeRequest( task, parentTaskId, rewritten, - localIndices, - remoteClusterIndices, + resolvedIndices, clusters, timeProvider, aggregationReduceContextBuilder, @@ -384,20 +392,21 @@ void executeRequest( task, timeProvider, r, - localIndices, + resolvedIndices, clusterState, clusters, - searchContext, searchPhaseProvider.apply(l) ) ); } else { + final SearchContextId searchContext = resolvedIndices.getSearchContextId(); SearchResponse.Clusters clusters = new SearchResponse.Clusters( - localIndices, - remoteClusterIndices, + resolvedIndices.getLocalIndices(), + resolvedIndices.getRemoteClusterIndices(), false, remoteClusterService::isSkipUnavailable ); + // TODO: pass parentTaskId collectSearchShards( rewritten.indicesOptions(), @@ -406,7 +415,7 @@ void executeRequest( rewritten.source() != null ? 
rewritten.source().query() : null, Objects.requireNonNullElse(rewritten.allowPartialSearchResults(), searchService.defaultAllowPartialSearchResults()), searchContext, - remoteClusterIndices, + resolvedIndices.getRemoteClusterIndices(), clusters, timeProvider, transportService, @@ -422,7 +431,7 @@ void executeRequest( searchShardsResponses, searchContext, rewritten.pointInTimeBuilder().getKeepAlive(), - remoteClusterIndices + resolvedIndices.getRemoteClusterIndices() ); } else { remoteAliasFilters = new HashMap<>(); @@ -431,7 +440,7 @@ void executeRequest( } remoteShardIterators = getRemoteShardsIterator( searchShardsResponses, - remoteClusterIndices, + resolvedIndices.getRemoteClusterIndices(), remoteAliasFilters ); } @@ -439,13 +448,12 @@ void executeRequest( task, timeProvider, rewritten, - localIndices, + resolvedIndices, remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, clusters, - searchContext, searchPhaseProvider.apply(finalDelegate) ); }) @@ -453,7 +461,12 @@ void executeRequest( } } }); - Rewriteable.rewriteAndFetch(original, searchService.getRewriteContext(timeProvider::absoluteStartMillis), rewriteListener); + + Rewriteable.rewriteAndFetch( + original, + searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices), + rewriteListener + ); } static void adjustSearchType(SearchRequest searchRequest, boolean singleShard) { @@ -507,8 +520,7 @@ static void ccsRemoteReduce( SearchTask task, TaskId parentTaskId, SearchRequest searchRequest, - OriginalIndices localIndices, - Map remoteIndices, + ResolvedIndices resolvedIndices, SearchResponse.Clusters clusters, SearchTimeProvider timeProvider, AggregationReduceContext.Builder aggReduceContextBuilder, @@ -518,10 +530,10 @@ static void ccsRemoteReduce( BiConsumer> localSearchConsumer ) { final var remoteClientResponseExecutor = threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION); - if (localIndices == null && remoteIndices.size() == 1) { + if (resolvedIndices.getLocalIndices() == null && resolvedIndices.getRemoteClusterIndices().size() == 1) { // if we are searching against a single remote cluster, we simply forward the original search request to such cluster // and we directly perform final reduction in the remote cluster - Map.Entry entry = remoteIndices.entrySet().iterator().next(); + Map.Entry entry = resolvedIndices.getRemoteClusterIndices().entrySet().iterator().next(); String clusterAlias = entry.getKey(); boolean skipUnavailable = remoteClusterService.isSkipUnavailable(clusterAlias); OriginalIndices indices = entry.getValue(); @@ -592,9 +604,9 @@ public void onFailure(Exception e) { () -> createSearchResponseMerger(searchRequest.source(), timeProvider, aggReduceContextBuilder) ); final AtomicReference exceptions = new AtomicReference<>(); - int totalClusters = remoteIndices.size() + (localIndices == null ? 0 : 1); + int totalClusters = resolvedIndices.getRemoteClusterIndices().size() + (resolvedIndices.getLocalIndices() == null ? 
0 : 1); final CountDown countDown = new CountDown(totalClusters); - for (Map.Entry entry : remoteIndices.entrySet()) { + for (Map.Entry entry : resolvedIndices.getRemoteClusterIndices().entrySet()) { String clusterAlias = entry.getKey(); boolean skipUnavailable = remoteClusterService.isSkipUnavailable(clusterAlias); OriginalIndices indices = entry.getValue(); @@ -623,7 +635,7 @@ public void onFailure(Exception e) { ); remoteClusterClient.execute(TransportSearchAction.REMOTE_TYPE, ccsSearchRequest, ccsListener); } - if (localIndices != null) { + if (resolvedIndices.getLocalIndices() != null) { ActionListener ccsListener = createCCSListener( RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, false, @@ -637,7 +649,7 @@ public void onFailure(Exception e) { SearchRequest ccsLocalSearchRequest = SearchRequest.subSearchRequest( parentTaskId, searchRequest, - localIndices.indices(), + resolvedIndices.getLocalIndices().indices(), RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, timeProvider.absoluteStartMillis(), false @@ -914,23 +926,21 @@ void executeLocalSearch( Task task, SearchTimeProvider timeProvider, SearchRequest searchRequest, - OriginalIndices localIndices, + ResolvedIndices resolvedIndices, ClusterState clusterState, SearchResponse.Clusters clusterInfo, - SearchContextId searchContext, SearchPhaseProvider searchPhaseProvider ) { executeSearch( (SearchTask) task, timeProvider, searchRequest, - localIndices, + resolvedIndices, Collections.emptyList(), (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), clusterInfo, - searchContext, searchPhaseProvider ); } @@ -1049,23 +1059,16 @@ private static boolean checkAllRemotePITShardsWereReturnedBySearchShards( .allMatch(searchContextIdForNode -> searchContextIdForNode.getClusterAlias() == null); } - Index[] resolveLocalIndices(OriginalIndices localIndices, ClusterState clusterState, SearchTimeProvider timeProvider) { - if (localIndices == null) { - return Index.EMPTY_ARRAY; // don't search on any local index (happens when only remote indices were specified) - } - - List frozenIndices = null; - Index[] indices = indexNameExpressionResolver.concreteIndices(clusterState, localIndices, timeProvider.absoluteStartMillis()); - for (Index index : indices) { - IndexMetadata indexMetadata = clusterState.metadata().index(index); - if (indexMetadata.getSettings().getAsBoolean("index.frozen", false)) { - if (frozenIndices == null) { - frozenIndices = new ArrayList<>(); - } - frozenIndices.add(index.getName()); + void frozenIndexCheck(ResolvedIndices resolvedIndices) { + List frozenIndices = new ArrayList<>(); + Map indexMetadataMap = resolvedIndices.getConcreteLocalIndicesMetadata(); + for (var entry : indexMetadataMap.entrySet()) { + if (entry.getValue().getSettings().getAsBoolean("index.frozen", false)) { + frozenIndices.add(entry.getKey().getName()); } } - if (frozenIndices != null) { + + if (frozenIndices.isEmpty() == false) { DEPRECATION_LOGGER.warn( DeprecationCategory.INDICES, "search-frozen-indices", @@ -1073,23 +1076,20 @@ Index[] resolveLocalIndices(OriginalIndices localIndices, ClusterState clusterSt String.join(",", frozenIndices) ); } - return indices; } private void executeSearch( SearchTask task, SearchTimeProvider timeProvider, SearchRequest searchRequest, - OriginalIndices localIndices, + ResolvedIndices resolvedIndices, List remoteShardIterators, BiFunction remoteConnections, ClusterState clusterState, Map remoteAliasMap, SearchResponse.Clusters clusters, - @Nullable SearchContextId searchContext, SearchPhaseProvider searchPhaseProvider 
) { - clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); if (searchRequest.allowPartialSearchResults() == null) { // No user preference defined in search request - apply cluster service default searchRequest.allowPartialSearchResults(searchService.defaultAllowPartialSearchResults()); @@ -1102,20 +1102,20 @@ private void executeSearch( final Map aliasFilter; final String[] concreteLocalIndices; - if (searchContext != null) { + if (resolvedIndices.getSearchContextId() != null) { assert searchRequest.pointInTimeBuilder() != null; - aliasFilter = searchContext.aliasFilter(); - concreteLocalIndices = localIndices == null ? new String[0] : localIndices.indices(); + aliasFilter = resolvedIndices.getSearchContextId().aliasFilter(); + concreteLocalIndices = resolvedIndices.getLocalIndices() == null ? new String[0] : resolvedIndices.getLocalIndices().indices(); localShardIterators = getLocalLocalShardsIteratorFromPointInTime( clusterState, - localIndices, + searchRequest.indicesOptions(), searchRequest.getLocalClusterAlias(), - searchContext, + resolvedIndices.getSearchContextId(), searchRequest.pointInTimeBuilder().getKeepAlive(), searchRequest.allowPartialSearchResults() ); } else { - final Index[] indices = resolveLocalIndices(localIndices, clusterState, timeProvider); + final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); @@ -1581,22 +1581,9 @@ private static RemoteTransportException wrapRemoteClusterFailure(String clusterA return new RemoteTransportException("error while communicating with remote cluster [" + clusterAlias + "]", e); } - static Map getIndicesFromSearchContexts(SearchContextId searchContext, IndicesOptions indicesOptions) { - final Map> indices = new HashMap<>(); - for (Map.Entry entry : searchContext.shards().entrySet()) { - String clusterAlias = entry.getValue().getClusterAlias() == null - ? 
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY - : entry.getValue().getClusterAlias(); - indices.computeIfAbsent(clusterAlias, k -> new HashSet<>()).add(entry.getKey().getIndexName()); - } - return indices.entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> new OriginalIndices(e.getValue().toArray(String[]::new), indicesOptions))); - } - static List getLocalLocalShardsIteratorFromPointInTime( ClusterState clusterState, - OriginalIndices originalIndices, + IndicesOptions indicesOptions, String localClusterAlias, SearchContextId searchContext, TimeValue keepAlive, @@ -1629,10 +1616,7 @@ static List getLocalLocalShardsIteratorFromPointInTime( throw e; } } - OriginalIndices finalIndices = new OriginalIndices( - new String[] { shardId.getIndexName() }, - originalIndices.indicesOptions() - ); + OriginalIndices finalIndices = new OriginalIndices(new String[] { shardId.getIndexName() }, indicesOptions); iterators.add( new SearchShardIterator( localClusterAlias, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index 60efb910a5269..c3eea1fe557e7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.RemoteClusterActionType; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +28,6 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportService; @@ -101,21 +100,24 @@ protected void doExecute(Task task, SearchShardsRequest searchShardsRequest, Act relativeStartNanos, System::nanoTime ); - ClusterState clusterState = clusterService.state(); + + final ClusterState clusterState = clusterService.state(); + final ResolvedIndices resolvedIndices = ResolvedIndices.resolveWithIndicesRequest( + searchShardsRequest, + clusterState, + indexNameExpressionResolver, + remoteClusterService, + timeProvider.absoluteStartMillis() + ); + if (resolvedIndices.getRemoteClusterIndices().isEmpty() == false) { + throw new UnsupportedOperationException("search_shards API doesn't support remote indices " + searchShardsRequest); + } + Rewriteable.rewriteAndFetch( original, - searchService.getRewriteContext(timeProvider::absoluteStartMillis), + searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices), listener.delegateFailureAndWrap((delegate, searchRequest) -> { - Map groupedIndices = remoteClusterService.groupIndices( - searchRequest.indicesOptions(), - searchRequest.indices() - ); - OriginalIndices originalIndices = groupedIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - if (groupedIndices.isEmpty() == false) { - throw new UnsupportedOperationException("search_shards API doesn't support remote indices " + searchRequest); - } - // TODO: Move a share stuff out of the TransportSearchAction. 
- Index[] concreteIndices = transportSearchAction.resolveLocalIndices(originalIndices, clusterState, timeProvider); + Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); final Map aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index a3980599e0e1a..3a8d3250c5628 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -724,7 +724,8 @@ public QueryRewriteContext newQueryRewriteContext( namedWriteableRegistry, valuesSourceRegistry, allowExpensiveQueries, - scriptService + scriptService, + null ); } diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index 91305af55dcf5..2a1062f8876d2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -50,6 +50,7 @@ public CoordinatorRewriteContext( null, null, null, + null, null ); this.indexLongFieldRange = indexLongFieldRange; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index f314d4d08de5f..81adfee36f923 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.query; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -60,6 +61,7 @@ public class QueryRewriteContext { protected boolean allowUnmappedFields; protected boolean mapUnmappedFieldAsString; protected Predicate allowedFields; + private final ResolvedIndices resolvedIndices; public QueryRewriteContext( final XContentParserConfiguration parserConfiguration, @@ -74,7 +76,8 @@ public QueryRewriteContext( final NamedWriteableRegistry namedWriteableRegistry, final ValuesSourceRegistry valuesSourceRegistry, final BooleanSupplier allowExpensiveQueries, - final ScriptCompiler scriptService + final ScriptCompiler scriptService, + final ResolvedIndices resolvedIndices ) { this.parserConfiguration = parserConfiguration; @@ -91,6 +94,7 @@ public QueryRewriteContext( this.valuesSourceRegistry = valuesSourceRegistry; this.allowExpensiveQueries = allowExpensiveQueries; this.scriptService = scriptService; + this.resolvedIndices = resolvedIndices; } public QueryRewriteContext(final XContentParserConfiguration parserConfiguration, final Client client, final LongSupplier nowInMillis) { @@ -107,10 +111,35 @@ public QueryRewriteContext(final XContentParserConfiguration parserConfiguration null, null, null, + null, null ); } + public QueryRewriteContext( + final XContentParserConfiguration parserConfiguration, + final Client client, + final LongSupplier nowInMillis, + final ResolvedIndices resolvedIndices + ) { + this( + parserConfiguration, + client, + nowInMillis, + null, + 
MappingLookup.EMPTY, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null, + null, + resolvedIndices + ); + } + /** * The registry used to build new {@link XContentParser}s. Contains registered named parsers needed to parse the query. * @@ -357,4 +386,8 @@ public Iterable> getAllFields() { // runtime mappings and non-runtime fields don't overlap, so we can simply concatenate the iterables here return () -> Iterators.concat(allEntrySet.iterator(), runtimeEntrySet.iterator()); } + + public ResolvedIndices getResolvedIndices() { + return resolvedIndices; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index cdd31f40dcfc6..59453356f0389 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -260,7 +260,8 @@ private SearchExecutionContext( namedWriteableRegistry, valuesSourceRegistry, allowExpensiveQueries, - scriptService + scriptService, + null ); this.shardId = shardId; this.shardRequestIndex = shardRequestIndex; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 026a20415aa91..5dda4912eaad5 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -18,6 +18,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; @@ -1723,8 +1724,8 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set remoteIndicesByCluster + ) { + return new MockResolvedIndices(remoteIndicesByCluster, localIndices, Map.of()); + } + public void testMergeShardsIterators() { Index[] indices = new Index[randomIntBetween(1, 10)]; for (int i = 0; i < indices.length; i++) { @@ -490,6 +501,8 @@ public void testCCSRemoteReduceMergeFails() throws Exception { boolean local = randomBoolean(); OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null; TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); + ResolvedIndices mockResolvedIndices = createMockResolvedIndices(localIndices, remoteIndicesByCluster); + try ( MockTransportService service = MockTransportService.createNewService( settings, @@ -518,8 +531,7 @@ public void testCCSRemoteReduceMergeFails() throws Exception { task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -560,6 +572,8 @@ public void testCCSRemoteReduce() throws Exception { OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null; int totalClusters = numClusters + (local ? 
1 : 0); TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); + ResolvedIndices mockResolvedIndices = createMockResolvedIndices(localIndices, remoteIndicesByCluster); + try ( MockTransportService service = MockTransportService.createNewService( settings, @@ -591,8 +605,7 @@ public void testCCSRemoteReduce() throws Exception { task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -648,8 +661,7 @@ public void testCCSRemoteReduce() throws Exception { task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -702,6 +714,8 @@ public void testCCSRemoteReduceWhereRemoteClustersFail() throws Exception { boolean local = randomBoolean(); OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null; TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); + ResolvedIndices mockResolvedIndices = createMockResolvedIndices(localIndices, remoteIndicesByCluster); + try ( MockTransportService service = MockTransportService.createNewService( settings, @@ -731,8 +745,7 @@ public void testCCSRemoteReduceWhereRemoteClustersFail() throws Exception { task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -774,6 +787,8 @@ public void testCCSRemoteReduceWithDisconnectedRemoteClusters() throws Exception OriginalIndices localIndices = local ? new OriginalIndices(new String[] { "index" }, SearchRequest.DEFAULT_INDICES_OPTIONS) : null; int totalClusters = numClusters + (local ? 
1 : 0); TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0); + ResolvedIndices mockResolvedIndices = createMockResolvedIndices(localIndices, remoteIndicesByCluster); + try ( MockTransportService service = MockTransportService.createNewService( settings, @@ -825,8 +840,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -877,8 +891,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -951,8 +964,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti task, parentTaskId, searchRequest, - localIndices, - remoteIndicesByCluster, + mockResolvedIndices, new SearchResponse.Clusters(localIndices, remoteIndicesByCluster, true, alias -> randomBoolean()), timeProvider, emptyReduceContextBuilder(), @@ -1624,7 +1636,7 @@ public void testLocalShardIteratorFromPointInTime() { final List shardIterators = TransportSearchAction.getLocalLocalShardsIteratorFromPointInTime( clusterState, - OriginalIndices.NONE, + null, null, new SearchContextId(contexts, aliasFilterMap), keepAlive, @@ -1669,7 +1681,7 @@ public void testLocalShardIteratorFromPointInTime() { IndexNotFoundException error = expectThrows(IndexNotFoundException.class, () -> { TransportSearchAction.getLocalLocalShardsIteratorFromPointInTime( clusterState, - OriginalIndices.NONE, + null, null, new SearchContextId(contexts, aliasFilterMap), keepAlive, @@ -1680,7 +1692,7 @@ public void testLocalShardIteratorFromPointInTime() { // Ok when some indices don't exist and `allowPartialSearchResults` is true. 
Optional anotherShardIterator = TransportSearchAction.getLocalLocalShardsIteratorFromPointInTime( clusterState, - OriginalIndices.NONE, + null, null, new SearchContextId(contexts, aliasFilterMap), keepAlive, @@ -1717,13 +1729,15 @@ protected void doWriteTo(StreamOutput out) throws IOException { NodeClient client = new NodeClient(settings, threadPool); SearchService searchService = mock(SearchService.class); - when(searchService.getRewriteContext(any())).thenReturn(new QueryRewriteContext(null, null, null)); + when(searchService.getRewriteContext(any(), any())).thenReturn(new QueryRewriteContext(null, null, null)); ClusterService clusterService = new ClusterService( settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool, null ); + clusterService.getClusterApplierService().setInitialState(ClusterState.EMPTY_STATE); + TransportSearchAction action = new TransportSearchAction( threadPool, new NoneCircuitBreakerService(), @@ -1733,7 +1747,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { null, clusterService, actionFilters, - null, + new IndexNameExpressionResolver(threadPool.getThreadContext(), EmptySystemIndices.INSTANCE), null, null, new SearchTransportAPMMetrics(TelemetryProvider.NOOP.getMeterRegistry()), diff --git a/test/framework/src/main/java/org/elasticsearch/action/MockResolvedIndices.java b/test/framework/src/main/java/org/elasticsearch/action/MockResolvedIndices.java new file mode 100644 index 0000000000000..a200b776312f7 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/action/MockResolvedIndices.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action; + +import org.elasticsearch.action.search.SearchContextId; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.index.Index; + +import java.util.Map; + +public class MockResolvedIndices extends ResolvedIndices { + public MockResolvedIndices( + Map remoteClusterIndices, + OriginalIndices localIndices, + Map localIndexMetadata, + SearchContextId searchContextId + ) { + super(remoteClusterIndices, localIndices, localIndexMetadata, searchContextId); + } + + public MockResolvedIndices( + Map remoteClusterIndices, + OriginalIndices localIndices, + Map localIndexMetadata + ) { + super(remoteClusterIndices, localIndices, localIndexMetadata); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index a5c0d4dbc6544..606bf35d58f14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -14,9 +14,13 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.MockResolvedIndices; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.ResolvedIndices; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; @@ -405,6 +409,7 @@ private static class ServiceHolder implements Closeable { private final ScriptService scriptService; private final Client client; private final long nowInMillis; + private final IndexMetadata indexMetadata; ServiceHolder( Settings nodeSettings, @@ -549,6 +554,16 @@ public void onRemoval(ShardId shardId, Accountable accountable) { }""", OBJECT_FIELD_NAME, DATE_FIELD_NAME, INT_FIELD_NAME)), MapperService.MergeReason.MAPPING_UPDATE); testCase.initializeAdditionalMappings(mapperService); } + + indexMetadata = IndexMetadata.builder(index.getName()) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()) + ) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); } public static Predicate indexNameMatcher() { @@ -600,7 +615,8 @@ QueryRewriteContext createQueryRewriteContext() { namedWriteableRegistry, null, () -> true, - scriptService + scriptService, + createMockResolvedIndices() ); } @@ -629,5 +645,13 @@ public ScriptEngine getScriptEngine(Settings settings, Collection textExpansionSearch(indexName, "the machine is leaking", modelId, "ml.tokens")); assertThat(e.getMessage(), containsString("[missing-model] is not an inference service model or a deployed ml model")); } From 7d4920b878c561ceb533eb47ada87e74d90efefc Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 15 Apr 2024 15:05:26 +0200 Subject: [PATCH 011/130] Throw RequestedRangeNotSatisfiedException when BlobContainer.readBlob starts reading after blob length (#107408) This change introduces a specialized exception RequestedRangeNotSatisfiedException that is thrown when FsBlobContainer or 
S3BlobContainer try to read a range of bytes from a position that is located after the real length of the blob. This exception can then be caught to detect such situation and acts accordingly. --- .../s3/S3RepositoryThirdPartyTests.java | 11 ++-- .../s3/S3RetryingInputStream.java | 15 ++---- .../s3/S3RetryingInputStreamTests.java | 54 ++++++++++++++++--- .../common/blobstore/fs/FsBlobContainer.java | 7 ++- .../RequestedRangeNotSatisfiedException.java | 50 +++++++++++++++++ .../blobstore/fs/FsBlobContainerTests.java | 39 ++++++++++++++ 6 files changed, 154 insertions(+), 22 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/repositories/blobstore/RequestedRangeNotSatisfiedException.java diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index 085e357da5ae9..5064910723ab6 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.AbstractThirdPartyRepositoryTestCase; import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.fixtures.minio.MinioTestContainer; @@ -243,15 +244,15 @@ public void testReadFromPositionLargerThanBlobLength() { var exception = expectThrows(UncategorizedExecutionException.class, () -> readBlob(repository, blobName, position, length)); assertThat(exception.getCause(), instanceOf(ExecutionException.class)); - assertThat(exception.getCause().getCause(), instanceOf(IOException.class)); + assertThat(exception.getCause().getCause(), instanceOf(RequestedRangeNotSatisfiedException.class)); assertThat( exception.getCause().getCause().getMessage(), containsString( - "Requested range [start=" + "Requested range [position=" + position - + ", end=" - + (position + length - 1L) - + ", currentOffset=0] cannot be satisfied for blob object [" + + ", length=" + + length + + "] cannot be satisfied for [" + repository.basePath().buildAsString() + blobName + ']' diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index d08ff5eefd20f..df5f3a2c93d28 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -20,6 +20,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; import org.elasticsearch.repositories.s3.S3BlobStore.Operation; import org.elasticsearch.rest.RestStatus; @@ -109,16 +110,10 @@ private void openStreamWithRetry() throws IOException { } if (amazonS3Exception.getStatusCode() == RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) { throw addSuppressedExceptions( - new IOException( - 
"Requested range [start=" - + start - + ", end=" - + end - + ", currentOffset=" - + currentOffset - + "] cannot be satisfied for blob object [" - + blobKey - + ']', + new RequestedRangeNotSatisfiedException( + blobKey, + currentStreamFirstOffset, + (end < Long.MAX_VALUE - 1) ? end - currentStreamFirstOffset + 1 : end, amazonS3Exception ) ); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java index f43fb8cfa4ed3..fbfdea8109485 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectInputStream; @@ -16,6 +17,8 @@ import org.apache.http.client.methods.HttpGet; import org.elasticsearch.common.io.Streams; import org.elasticsearch.core.Nullable; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; @@ -23,7 +26,9 @@ import java.util.Arrays; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -81,24 +86,61 @@ public void testRangeInputStreamIsAborted() throws IOException { assertThat(stream.isAborted(), is(true)); } + public void testReadAfterBlobLengthThrowsRequestedRangeNotSatisfiedException() throws IOException { + final byte[] bytes = randomByteArrayOfLength(randomIntBetween(1, 512)); + { + final int position = bytes.length + randomIntBetween(0, 100); + final int length = randomIntBetween(1, 100); + var exception = expectThrows(RequestedRangeNotSatisfiedException.class, () -> { + try (var ignored = createInputStream(bytes, position, length)) { + fail(); + } + }); + assertThat(exception.getResource(), equalTo("_blob")); + assertThat(exception.getPosition(), equalTo((long) position)); + assertThat(exception.getLength(), equalTo((long) length)); + assertThat( + exception.getMessage(), + startsWith("Requested range [position=" + position + ", length=" + length + "] cannot be satisfied for [_blob]") + ); + } + { + int position = randomIntBetween(0, Math.max(0, bytes.length - 1)); + int maxLength = bytes.length - position; + int length = randomIntBetween(maxLength + 1, Integer.MAX_VALUE - 1); + try (var stream = createInputStream(bytes, position, length)) { + assertThat(Streams.consumeFully(stream), equalTo((long) maxLength)); + } + } + } + private S3RetryingInputStream createInputStream(final byte[] data, @Nullable final Integer position, @Nullable final Integer length) throws IOException { - final S3Object s3Object = new S3Object(); final AmazonS3 client = mock(AmazonS3.class); - when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); final AmazonS3Reference clientReference = 
mock(AmazonS3Reference.class); when(clientReference.client()).thenReturn(client); final S3BlobStore blobStore = mock(S3BlobStore.class); when(blobStore.clientReference()).thenReturn(clientReference); if (position != null && length != null) { + if (data.length <= position) { + var amazonS3Exception = new AmazonS3Exception("test"); + amazonS3Exception.setStatusCode(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()); + when(client.getObject(any(GetObjectRequest.class))).thenThrow(amazonS3Exception); + return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob", position, Math.addExact(position, length - 1)); + } + + final S3Object s3Object = new S3Object(); s3Object.getObjectMetadata().setContentLength(length); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data, position, length), new HttpGet())); + when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob", position, Math.addExact(position, length - 1)); - } else { - s3Object.getObjectMetadata().setContentLength(data.length); - s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data), new HttpGet())); - return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob"); } + + final S3Object s3Object = new S3Object(); + s3Object.getObjectMetadata().setContentLength(data.length); + s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data), new HttpGet())); + when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); + return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob"); } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 749773cd91eb8..aea24b7020a02 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; import java.io.Closeable; import java.io.FileNotFoundException; @@ -197,7 +198,11 @@ public InputStream readBlob(OperationPurpose purpose, String blobName, long posi assert BlobContainer.assertPurposeConsistency(purpose, blobName); final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { - assert position < channel.size() : "reading from " + position + " exceeds file length " + channel.size(); + if (channel.size() <= position) { + try (channel) { + throw new RequestedRangeNotSatisfiedException(blobName, position, length); + } + } channel.position(position); } assert channel.position() == position; diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/RequestedRangeNotSatisfiedException.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/RequestedRangeNotSatisfiedException.java new file mode 100644 index 0000000000000..834a64ce056ac --- /dev/null +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/RequestedRangeNotSatisfiedException.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
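For callers of readBlob, the RequestedRangeNotSatisfiedException defined just below gives a typed way to tell an out-of-range read apart from other I/O failures. An illustrative sketch, not part of the patch itself; the container setup and the handleMissingRange handler are assumed:

try (InputStream in = container.readBlob(purpose, blobName, position, length)) {
    in.readAllBytes();
} catch (RequestedRangeNotSatisfiedException e) {
    // The exception carries the blob name plus the requested position and length.
    handleMissingRange(e.getResource(), e.getPosition(), e.getLength()); // assumed handler
}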
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.repositories.blobstore; + +import org.elasticsearch.common.Strings; + +import java.io.IOException; + +public class RequestedRangeNotSatisfiedException extends IOException { + + private final String resource; + private final long position; + private final long length; + + public RequestedRangeNotSatisfiedException(String resource, long position, long length) { + super(message(resource, position, length)); + this.resource = resource; + this.position = position; + this.length = length; + } + + public RequestedRangeNotSatisfiedException(String resource, long position, long length, Throwable cause) { + super(message(resource, position, length), cause); + this.resource = resource; + this.position = position; + this.length = length; + } + + public String getResource() { + return resource; + } + + public long getPosition() { + return position; + } + + public long getLength() { + return length; + } + + private static String message(String resource, long position, long length) { + return Strings.format("Requested range [position=%d, length=%d] cannot be satisfied for [%s]", position, length, resource); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java index 67712af9ef57b..16213f1f761e4 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.PathUtilsForTesting; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; @@ -95,6 +96,44 @@ public void testReadBlobRangeCorrectlySkipBytes() throws IOException { } } + public void testReadAfterBlobLengthThrowsRequestedRangeNotSatisfiedException() throws IOException { + final var blobName = "blob"; + final byte[] blobData = randomByteArrayOfLength(randomIntBetween(1, frequently() ? 
512 : 1 << 20)); // rarely up to 1mb + + final Path path = PathUtils.get(createTempDir().toString()); + Files.write(path.resolve(blobName), blobData); + + final FsBlobContainer container = new FsBlobContainer( + new FsBlobStore(randomIntBetween(1, 8) * 1024, path, true), + BlobPath.EMPTY, + path + ); + + { + long position = randomLongBetween(blobData.length, Long.MAX_VALUE - 1L); + long length = randomLongBetween(1L, Long.MAX_VALUE - position); + var exception = expectThrows( + RequestedRangeNotSatisfiedException.class, + () -> container.readBlob(randomPurpose(), blobName, position, length) + ); + assertThat( + exception.getMessage(), + equalTo("Requested range [position=" + position + ", length=" + length + "] cannot be satisfied for [" + blobName + ']') + ); + } + + { + long position = randomLongBetween(0L, Math.max(0L, blobData.length - 1)); + long maxLength = blobData.length - position; + long length = randomLongBetween(maxLength + 1L, Long.MAX_VALUE - 1L); + try (var stream = container.readBlob(randomPurpose(), blobName, position, length)) { + assertThat(totalBytesRead.get(), equalTo(0L)); + assertThat(Streams.consumeFully(stream), equalTo(maxLength)); + assertThat(totalBytesRead.get(), equalTo(maxLength)); + } + } + } + public void testTempBlobName() { final String blobName = randomAlphaOfLengthBetween(1, 20); final String tempBlobName = FsBlobContainer.tempBlobName(blobName); From a2d6ffe712d7a87b7fe2a61324047376f33b1c6e Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 09:10:06 -0400 Subject: [PATCH 012/130] Skipping failing test (#107464) Muting https://github.com/elastic/elasticsearch/issues/107462 --- .../test/java/org/elasticsearch/index/shard/IndexShardTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index df4bde959d6ca..091673e3eb0cd 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -4001,6 +4001,7 @@ public void testFlushOnIdle() throws Exception { closeShards(shard); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107462") public void testFlushTimeExcludingWaiting() throws Exception { IndexShard shard = newStartedShard(); for (int i = 0; i < randomIntBetween(4, 10); i++) { From a0693a59fb80d0a22e976ede37c5b7bd6cb9dc76 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 09:14:39 -0400 Subject: [PATCH 013/130] Muting testStackOverflow (#107465) Muting https://github.com/elastic/elasticsearch/issues/107416 --- .../java/org/elasticsearch/ingest/common/GsubProcessorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java index 6936ad11a785a..9680b174031da 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java @@ -31,6 +31,7 @@ protected String expectedResult(String input) { return "127-0-0-1"; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107416") public void testStackOverflow() { // 
This tests that we rethrow StackOverflowErrors as ElasticsearchExceptions so that we don't take down the node String badRegex = "( (?=(?:[^'\"]|'[^']*'|\"[^\"]*\")*$))"; From 001680b8e71e73dcf68047132a2d8b4e45bc2023 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 09:35:08 -0400 Subject: [PATCH 014/130] Muting fetch fields with none stored_fields (#107468) Muting https://github.com/elastic/elasticsearch/issues/107466 --- .../rest-api-spec/test/search/520_fetch_fields.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml index ad74cd2ccd795..d5f8eb4b0762d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/520_fetch_fields.yml @@ -2,7 +2,7 @@ setup: - do: indices.create: - index: test + index: test body: settings: index.number_of_shards: 1 @@ -27,8 +27,8 @@ setup: - do: index: - index: test - id: "1" + index: test + id: "1" refresh: true body: stored_keyword: "stored_keyword_value" @@ -144,8 +144,8 @@ fetch _seq_no via fields: --- fetch fields with none stored_fields: - skip: - version: " - 7.99.99" - reason: "from illegal_argument_exception to action_request_validation_exception" + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/107466" - do: catch: "bad_request" @@ -153,7 +153,7 @@ fetch fields with none stored_fields: index: test body: stored_fields: _none_ - fields: [stored_keyword, keyword, stored_value, value, ignored_keyword, ignored_value, _ignored] + fields: [ stored_keyword, keyword, stored_value, value, ignored_keyword, ignored_value, _ignored ] - match: { status: 400 } - match: { error.root_cause.0.type: action_request_validation_exception } From 44c4788afb3da669c741948625271a85a880ebbc Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 15 Apr 2024 09:48:03 -0400 Subject: [PATCH 015/130] [Transform] Check node shutdown before fail (#107358) Transforms continue to run even when a node is shutting down. This may lead to a transform failing and putting itself into a failed state, which will prevent it from restarting when the node comes back online. The transform will now abort rather than fail, which puts itself into a started state. When the node comes back online, or another node in the cluster starts the transform, then the transform will pick up from its last successful saved state and checkpoint. 
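The mechanism, introduced as TransformNode in the diff below, is a lookup of the node-shutdown metadata in the most recently observed cluster state. A minimal sketch of that check, assuming a current ClusterState is at hand:

// Illustrative only; mirrors TransformNode#isShuttingDown from the diff below.
static Optional<Boolean> isLocalNodeShuttingDown(ClusterState state) {
    String localId = state.nodes().getLocalNodeId();
    if (localId == null) {
        return Optional.empty(); // local node not assigned yet
    }
    return Optional.of(state.metadata().nodeShutdowns().contains(localId));
}

When the check returns true, TransformTask#fail calls markAsLocallyAborted instead of entering a failed state, so a restarted task resumes from the persisted state and checkpoint.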
Close #100891 --- docs/changelog/107358.yaml | 6 + .../xpack/transform/Transform.java | 10 +- .../TransformClusterStateListener.java | 16 ++- .../xpack/transform/TransformNode.java | 49 ++++++++ .../xpack/transform/TransformServices.java | 9 +- .../TransformPersistentTasksExecutor.java | 3 +- .../transform/transforms/TransformTask.java | 25 +++- .../xpack/transform/TransformNodeTests.java | 112 ++++++++++++++++++ .../ClientTransformIndexerTests.java | 13 +- .../TransformIndexerFailureHandlingTests.java | 4 +- ...IndexerFailureOnStatePersistenceTests.java | 10 +- .../TransformIndexerStateTests.java | 7 +- .../transforms/TransformIndexerTests.java | 4 +- ...TransformPersistentTasksExecutorTests.java | 2 +- .../transforms/TransformTaskTests.java | 88 +++++++++++++- 15 files changed, 328 insertions(+), 30 deletions(-) create mode 100644 docs/changelog/107358.yaml create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformNode.java create mode 100644 x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformNodeTests.java diff --git a/docs/changelog/107358.yaml b/docs/changelog/107358.yaml new file mode 100644 index 0000000000000..edb6deeffd100 --- /dev/null +++ b/docs/changelog/107358.yaml @@ -0,0 +1,6 @@ +pr: 107358 +summary: Check node shutdown before fail +area: Transform +type: enhancement +issues: + - 100891 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 54a7c9ec733c2..5b6d0f5dbe608 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -255,14 +255,12 @@ public Collection createComponents(PluginServices services) { getTransformExtension().getMinFrequency() ); scheduler.start(); + var clusterStateListener = new TransformClusterStateListener(clusterService, client); + var transformNode = new TransformNode(clusterStateListener); - transformServices.set(new TransformServices(configManager, checkpointService, auditor, scheduler)); + transformServices.set(new TransformServices(configManager, checkpointService, auditor, scheduler, transformNode)); - return List.of( - transformServices.get(), - new TransformClusterStateListener(clusterService, client), - new TransformExtensionHolder(getTransformExtension()) - ); + return List.of(transformServices.get(), clusterStateListener, new TransformExtensionHolder(getTransformExtension())); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java index 970403e49c5a3..4c867616e9be0 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformClusterStateListener.java @@ -21,17 +21,21 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -class TransformClusterStateListener implements ClusterStateListener { +class TransformClusterStateListener implements ClusterStateListener, Supplier<Optional<ClusterState>> { private static final Logger logger = LogManager.getLogger(TransformClusterStateListener.class); private final Client client; private final AtomicBoolean isIndexCreationInProgress = new AtomicBoolean(false); + private final AtomicReference<ClusterState> clusterState = new AtomicReference<>(); TransformClusterStateListener(ClusterService clusterService, Client client) { this.client = client; @@ -46,6 +50,8 @@ public void clusterChanged(ClusterChangedEvent event) { return; } + clusterState.set(event.state()); + // The atomic flag prevents multiple simultaneous attempts to run alias creation // if there is a flurry of cluster state updates in quick succession if (event.localNodeMaster() && isIndexCreationInProgress.compareAndSet(false, true)) { @@ -102,4 +108,12 @@ private static void createAuditAliasForDataFrameBWC(ClusterState state, Client c ); } + /** + * Retrieves the saved cluster state from the most recent update. + * This differs from {@link ClusterService#state()} in that it will not throw an exception when ClusterState is null. + */ + @Override + public Optional<ClusterState> get() { + return Optional.ofNullable(clusterState.get()); + } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformNode.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformNode.java new file mode 100644 index 0000000000000..1780ebb499228 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformNode.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; + +import java.util.Optional; +import java.util.function.Supplier; + +/** + * Stateful representation of this node, relevant to the {@link org.elasticsearch.xpack.transform.transforms.TransformTask}. + * For stateless functions, see {@link org.elasticsearch.xpack.transform.transforms.TransformNodes}. + */ +public class TransformNode { + private final Supplier<Optional<ClusterState>> clusterState; + + public TransformNode(Supplier<Optional<ClusterState>> clusterState) { + this.clusterState = clusterState; + } + + /** + * @return an optional containing true if this node is reported as shutting down in the cluster state metadata, false if it is not + * reported as shutting down, or empty if the cluster state is missing or the local node has not been set yet. + */ + public Optional<Boolean> isShuttingDown() { + return clusterState.get().map(state -> { + var localId = state.nodes().getLocalNodeId(); + if (localId != null) { + return state.metadata().nodeShutdowns().contains(localId); + } else { + return null; // empty + } + }); + } + + /** + * @return the node id stored in the cluster state, or "null" if the cluster state is missing or the local node has not been set yet. + * This should behave exactly as {@link String#valueOf(Object)}.
+ */ + public String nodeId() { + return clusterState.get().map(ClusterState::nodes).map(DiscoveryNodes::getLocalNodeId).orElse("null"); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java index a7b5aec64bfae..4b0179a56d6f1 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java @@ -26,17 +26,20 @@ public final class TransformServices { private final TransformCheckpointService checkpointService; private final TransformAuditor auditor; private final TransformScheduler scheduler; + private final TransformNode transformNode; public TransformServices( TransformConfigManager configManager, TransformCheckpointService checkpointService, TransformAuditor auditor, - TransformScheduler scheduler + TransformScheduler scheduler, + TransformNode transformNode ) { this.configManager = Objects.requireNonNull(configManager); this.checkpointService = Objects.requireNonNull(checkpointService); this.auditor = Objects.requireNonNull(auditor); this.scheduler = Objects.requireNonNull(scheduler); + this.transformNode = transformNode; } public TransformConfigManager getConfigManager() { @@ -54,4 +57,8 @@ public TransformAuditor getAuditor() { public TransformScheduler getScheduler() { return scheduler; } + + public TransformNode getTransformNode() { + return transformNode; + } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index f18414e3aaead..eb1e5034c4940 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -497,7 +497,8 @@ protected AllocatedPersistentTask createTask( transformServices.getScheduler(), auditor, threadPool, - headers + headers, + transformServices.getTransformNode() ); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index 8eecd20c95ccd..7c9a22aa9fbfe 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; @@ -68,6 +69,7 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS private final TransformIndexerPosition initialPosition; private final IndexerState initialIndexerState; 
private final TransformContext context; + private final TransformNode transformNode; private final SetOnce indexer = new SetOnce<>(); @SuppressWarnings("this-escape") @@ -81,7 +83,8 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS TransformScheduler transformScheduler, TransformAuditor auditor, ThreadPool threadPool, - Map headers + Map headers, + TransformNode transformNode ) { super(id, type, action, TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transform.getId(), parentTask, headers); this.transform = transform; @@ -118,6 +121,7 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS if (state != null) { this.context.setAuthState(state.getAuthState()); } + this.transformNode = transformNode; } public String getTransformId() { @@ -524,11 +528,26 @@ public void fail(Throwable exception, String reason, ActionListener listen return; } - logger.atError().withThrowable(exception).log("[{}] transform has failed; experienced: [{}].", transform.getId(), reason); - auditor.error(transform.getId(), reason); // We should not keep retrying. Either the task will be stopped, or started // If it is started again, it is registered again. transformScheduler.deregisterTransform(getTransformId()); + + if (transformNode.isShuttingDown().orElse(false)) { + logger.atDebug() + .withThrowable(exception) + .log( + "Aborting transform [{}]. Transform has failed while node [{}] is shutting down. Reason: [{}]", + transform.getId(), + transformNode.nodeId(), + reason + ); + markAsLocallyAborted("Node is shutting down."); + listener.onResponse(null); + return; + } + + logger.atError().withThrowable(exception).log("[{}] transform has failed; experienced: [{}].", transform.getId(), reason); + auditor.error(transform.getId(), reason); // The idea of stopping at the next checkpoint is no longer valid. Since a failed task could potentially START again, // we should set this flag to false. context.setShouldStopAtCheckpoint(false); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformNodeTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformNodeTests.java new file mode 100644 index 0000000000000..3df566fd70047 --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformNodeTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; +import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.Optional; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class TransformNodeTests extends ESTestCase { + private static final String SHUTTING_DOWN_ID = "shuttingDownNodeId"; + private static final String NOT_SHUTTING_DOWN_ID = "notShuttingDownId"; + + /** + * When the local node is shutting down + * Then we return true + */ + public void testIsShuttingDown() { + var isShuttingDown = new TransformNode(clusterState(SHUTTING_DOWN_ID)).isShuttingDown(); + assertTrue(isShuttingDown.isPresent()); + assertTrue(isShuttingDown.get()); + } + + /** + * When the local node is not shutting down + * Then we return false + */ + public void testIsNotShuttingDown() { + var isShuttingDown = new TransformNode(clusterState(NOT_SHUTTING_DOWN_ID)).isShuttingDown(); + assertTrue(isShuttingDown.isPresent()); + assertFalse(isShuttingDown.get()); + } + + /** + * When the local node is null + * Then we return empty + */ + public void testMissingLocalId() { + var isShuttingDown = new TransformNode(clusterState(null)).isShuttingDown(); + assertFalse(isShuttingDown.isPresent()); + } + + /** + * When the cluster state is empty + * Then we return empty + */ + public void testClusterStateMissing() { + var isShuttingDown = new TransformNode(Optional::empty).isShuttingDown(); + assertFalse(isShuttingDown.isPresent()); + } + + /** + * When there is a local node + * Then return its id + */ + public void testNodeId() { + var nodeId = new TransformNode(clusterState(SHUTTING_DOWN_ID)).nodeId(); + assertThat(nodeId, equalTo(SHUTTING_DOWN_ID)); + } + + /** + * When the local node is null + * Then return "null" + */ + public void testNodeIdMissing() { + var nodeId = new TransformNode(Optional::empty).nodeId(); + assertThat(nodeId, equalTo(String.valueOf((String) null))); + } + + private Supplier> clusterState(String nodeId) { + var nodesShutdownMetadata = new NodesShutdownMetadata( + Map.of( + SHUTTING_DOWN_ID, + SingleNodeShutdownMetadata.builder() + .setNodeId(SHUTTING_DOWN_ID) + .setReason("shutdown for a unit test") + .setType(SingleNodeShutdownMetadata.Type.RESTART) + .setStartedAtMillis(randomNonNegativeLong()) + .setGracePeriod(null) + .build() + ) + ); + + var nodes = DiscoveryNodes.builder().add(DiscoveryNodeUtils.create(SHUTTING_DOWN_ID)).localNodeId(nodeId).masterNodeId(nodeId); + + if (SHUTTING_DOWN_ID.equals(nodeId) == false && nodeId != null) { + nodes.add(DiscoveryNodeUtils.create(nodeId)); + } + + var state = ClusterState.builder(ClusterName.DEFAULT) + .metadata(Metadata.builder().putCustom(NodesShutdownMetadata.TYPE, nodesShutdownMetadata).build()) + .nodes(nodes) + .build(); + + return () -> Optional.of(state); + } +} diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 017fe3d289b0c..9173e75c4737a 100644 --- 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.TransformExtension; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; @@ -140,7 +141,8 @@ public void testPitInjection() throws InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -237,7 +239,8 @@ public void testPitInjectionIfPitNotSupported() throws InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -323,7 +326,8 @@ public void testDisablePit() throws InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -572,7 +576,8 @@ private ClientTransformIndexer createTestIndexer(ParentTaskAssigningClient clien mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index 079ab6afe2200..fe54847af0404 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.transform.Transform; import 
org.elasticsearch.xpack.transform.TransformExtension; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; @@ -137,7 +138,8 @@ static class MockedTransformIndexer extends ClientTransformIndexer { transformsConfigManager, mock(TransformCheckpointService.class), auditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ), checkpointProvider, initialState, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java index 7d5c5a41e3154..3c1c324ebdc1e 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.TransformExtension; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; @@ -230,7 +231,8 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), @@ -315,7 +317,8 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), @@ -449,7 +452,8 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index fceba25afc7fd..c4b35181ecf67 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.checkpoint.MockTimebasedCheckpointProvider; @@ -814,7 +815,8 @@ private MockedTransformIndexer createMockIndexer( transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) + new 
TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ); MockedTransformIndexer indexer = new MockedTransformIndexer( @@ -848,7 +850,8 @@ private MockedTransformIndexerForStatePersistenceTesting createMockIndexerForSta transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ); MockedTransformIndexerForStatePersistenceTesting indexer = new MockedTransformIndexerForStatePersistenceTesting( diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index 279ce65be0be2..abad10b148f21 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.CheckpointProvider; import org.elasticsearch.xpack.transform.checkpoint.MockTimebasedCheckpointProvider; @@ -454,7 +455,8 @@ private MockedTransformIndexer createMockIndexer( transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ); MockedTransformIndexer indexer = new MockedTransformIndexer( diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index b5192535e911a..c2a526181f90e 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -564,7 +564,7 @@ private TransformServices transformServices(TransformConfigManager configManager configManager, mockAuditor ); - return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler); + return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler, null); } private TransformPersistentTasksExecutor buildTaskExecutor(TransformServices transformServices) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index fbf59348e8152..31bd365250e3c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.transform.DefaultTransformExtension; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.MockTransformAuditor; @@ -57,6 +58,7 @@ import java.time.Clock; import java.util.Collections; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; @@ -73,6 +75,8 @@ import static org.hamcrest.Matchers.sameInstance; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; @@ -130,7 +134,8 @@ public void testStopOnFailedTaskWithStoppedIndexer() { new TransformScheduler(clock, threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, - Collections.emptyMap() + Collections.emptyMap(), + mockTransformNode() ); TaskManager taskManager = mock(TaskManager.class); @@ -174,6 +179,12 @@ public void testStopOnFailedTaskWithStoppedIndexer() { assertEquals(state.getReason(), null); } + private TransformNode mockTransformNode() { + var transformNode = mock(TransformNode.class); + when(transformNode.isShuttingDown()).thenReturn(randomBoolean() ? 
Optional.of(false) : Optional.empty()); + return transformNode; + } + private TransformServices transformServices(Clock clock, TransformAuditor auditor, ThreadPool threadPool) { var transformsConfigManager = new InMemoryTransformConfigManager(); var transformsCheckpointService = new TransformCheckpointService( @@ -192,7 +203,8 @@ private TransformServices transformServices(Clock clock, TransformAuditor audito transformsConfigManager, transformsCheckpointService, auditor, - new TransformScheduler(clock, threadPool, Settings.EMPTY, TimeValue.ZERO) + new TransformScheduler(clock, threadPool, Settings.EMPTY, TimeValue.ZERO), + mock(TransformNode.class) ); } @@ -234,7 +246,8 @@ public void testStopOnFailedTaskWithoutIndexer() { new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, - Collections.emptyMap() + Collections.emptyMap(), + mockTransformNode() ); TaskManager taskManager = mock(TaskManager.class); @@ -279,6 +292,66 @@ public void testStopOnFailedTaskWithoutIndexer() { assertEquals(state.getReason(), null); } + public void testFailWhenNodeIsShuttingDown() { + var threadPool = mock(ThreadPool.class); + when(threadPool.executor("generic")).thenReturn(mock(ExecutorService.class)); + + var transformConfig = TransformConfigTests.randomTransformConfigWithoutHeaders(); + var auditor = MockTransformAuditor.createMockAuditor(); + + var transformState = new TransformState( + TransformTaskState.STARTED, + IndexerState.INDEXING, + null, + 0L, + "because", + null, + null, + false, + null + ); + + var node = mock(TransformNode.class); + when(node.isShuttingDown()).thenReturn(Optional.of(true)); + when(node.nodeId()).thenReturn("node"); + + var transformTask = new TransformTask( + 42, + "some_type", + "some_action", + TaskId.EMPTY_TASK_ID, + createTransformTaskParams(transformConfig.getId()), + transformState, + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), + auditor, + threadPool, + Collections.emptyMap(), + node + ); + + var taskManager = mock(TaskManager.class); + var persistentTasksService = mock(PersistentTasksService.class); + transformTask.init(persistentTasksService, taskManager, "task-id", 42); + var listenerCalled = new AtomicBoolean(false); + transformTask.fail(null, "because", ActionTestUtils.assertNoFailureListener(r -> { listenerCalled.compareAndSet(false, true); })); + + var state = transformTask.getState(); + assertEquals(TransformTaskState.STARTED, state.getTaskState()); + assertEquals(IndexerState.STARTED, state.getIndexerState()); + + assertTrue(listenerCalled.get()); + // verify shutdown has been called + verify(taskManager, times(1)).unregister(any()); + verify(persistentTasksService, times(1)).sendCompletionRequest( + eq("task-id"), + eq(42L), + isNull(), + eq("Node is shutting down."), + isNull(), + any() + ); + } + public void testGetTransformTask() { { ClusterState clusterState = ClusterState.EMPTY_STATE; @@ -453,7 +526,8 @@ public void testApplyNewAuthState() { new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, - Collections.emptyMap() + Collections.emptyMap(), + mockTransformNode() ); assertThat(transformTask.getContext().getAuthState().getStatus(), is(equalTo(HealthStatus.GREEN))); @@ -502,7 +576,8 @@ private TransformTask createTransformTask(TransformConfig transformConfig, MockT new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, - Collections.emptyMap() + 
Collections.emptyMap(), + mockTransformNode() ); } @@ -603,7 +678,8 @@ private void testTriggered(TransformTaskState taskState, IndexerState indexerSta new TransformScheduler(mock(Clock.class), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, - Collections.emptyMap() + Collections.emptyMap(), + mockTransformNode() ); ClientTransformIndexer indexer = mock(ClientTransformIndexer.class); From d2cdee311161d08cc1833863ec4cff31ae7dc976 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 09:49:12 -0400 Subject: [PATCH 016/130] Muting Codec tests (#107470) Muting https://github.com/elastic/elasticsearch/issues/107417 --- .../src/test/java/org/elasticsearch/index/codec/CodecTests.java | 2 ++ .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index bd4aa0241cd27..5b1b63e9ce82d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -18,6 +18,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.settings.Settings; @@ -39,6 +40,7 @@ import static org.hamcrest.Matchers.instanceOf; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417") @SuppressCodecs("*") // we test against default codec so never get a random one here! public class CodecTests extends ESTestCase { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index c194c1031ab41..41707cdfdded8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -1092,6 +1092,7 @@ public void testFloatVectorQueryBoundaries() throws IOException { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417") public void testKnnVectorsFormat() throws IOException { final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); @@ -1125,6 +1126,7 @@ public void testKnnVectorsFormat() throws IOException { assertEquals(expectedString, knnVectorsFormat.toString()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417") public void testKnnQuantizedHNSWVectorsFormat() throws IOException { final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); From e306dd187201e2ea8e883ed24596898fe255a286 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 15 Apr 2024 09:58:00 -0400 Subject: [PATCH 017/130] ESQL: Default ESQL version on old clients (#107438) Soon we will require an ESQL version for all ESQL requests. Kibana is sending it already. The official clients will start sending it in version 8.14+. This defaults the version for the official clients before 8.14. 
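As the commit message explains in the next paragraph, the detection reduces to a substring match on the client metadata header. A minimal sketch of the rule, where isPre814EsClient is a hypothetical name; the real logic lives in RestEsqlQueryAction#defaultVersionForOldClients in the diff below:

static boolean isPre814EsClient(String clientMeta) {
    // e.g. an 8.13 python client sends "es=8.13.0,py=3.11.8,t=8.13.0,ur=2.2.1"
    return clientMeta.contains("es=8.11") || clientMeta.contains("es=8.12") || clientMeta.contains("es=8.13");
}

Kibana is special-cased: versions 8.11 through 8.13 shipped the 8.9 javascript client, so for requests carrying the kibana product-origin header the marker checked is es=8.9 instead.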
It does so by reading a magic header the clients send, `x-elastic-client-meta` and detecting the version string for any clients that might possibly support ESQL - `es=8.11`, `es=8.12`, and `es=8.13`. If we receive that we'll default to the first version of ESQL. I've also made this work for kibana versions 8.12 and 8.13 - discover will default to the old version of ESQL. --- .../esql/action/RestEsqlAsyncQueryAction.java | 1 + .../esql/action/RestEsqlQueryAction.java | 39 +++++ .../esql/action/RestEsqlQueryActionTests.java | 137 ++++++++++++++++++ 3 files changed, 177 insertions(+) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 678c2ca3bed5c..51baa900ce322 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -46,6 +46,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli esqlRequest = RequestXContent.parseAsync(parser); } + RestEsqlQueryAction.defaultVersionForOldClients(esqlRequest, request); LOGGER.info("Beginning execution of ESQL async query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index a03e5266bf520..d8fbe4ae35c1d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.esql.version.EsqlVersion; import java.io.IOException; import java.util.List; @@ -45,6 +46,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli esqlRequest = RequestXContent.parseSync(parser); } + defaultVersionForOldClients(esqlRequest, request); LOGGER.info("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { @@ -61,4 +63,41 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli protected Set responseParams() { return Set.of(URL_PARAM_DELIMITER, EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION); } + + static final String PRODUCT_ORIGIN = "x-elastic-product-origin"; + static final String CLIENT_META = "x-elastic-client-meta"; + + /** + * Default the {@link EsqlQueryRequest#esqlVersion()} to the oldest version + * if we can detect that the request comes from an older version of the + * official client or an older version of kibana. These versions supported + * ESQL but ESQL was not GA, so, technically we can break + * them. But it's not hugely complicated to make them work smoothly on the + * upgrade that starts to require the {@code version} field. This does + * just that. 
+     */
+    static void defaultVersionForOldClients(EsqlQueryRequest esqlRequest, RestRequest restRequest) {
+        if (esqlRequest.esqlVersion() != null) {
+            return;
+        }
+        String clientMeta = restRequest.header(CLIENT_META);
+        if (clientMeta == null) {
+            return;
+        }
+        String product = restRequest.header(PRODUCT_ORIGIN);
+        if ("kibana".equals(product)) {
+            /*
+             * Kibana 8.11 to 8.13 used the 8.9 version of the javascript client.
+             * Kibana 8.14, the first version we *want* to send the version itself,
+             * is on the 8.13 version of the javascript client.
+             */
+            if (clientMeta.contains("es=8.9")) {
+                esqlRequest.esqlVersion(EsqlVersion.ROCKET.versionStringWithoutEmoji());
+            }
+            return;
+        }
+        if (clientMeta.contains("es=8.13") || clientMeta.contains("es=8.12") || clientMeta.contains("es=8.11")) {
+            esqlRequest.esqlVersion(EsqlVersion.ROCKET.versionStringWithoutEmoji());
+        }
+    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java
new file mode 100644
index 0000000000000..6ee720e6a7334
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.xpack.esql.version.EsqlVersion;
+import org.hamcrest.Matcher;
+
+import java.util.List;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.CLIENT_META;
+import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.PRODUCT_ORIGIN;
+import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.defaultVersionForOldClients;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class RestEsqlQueryActionTests extends ESTestCase {
+    public void testNoVersionForNoClient() {
+        assertEsqlVersion(null, null, nullValue(String.class));
+    }
+
+    public void testNoVersionForAlreadySet() {
+        EsqlQueryRequest esqlRequest = new EsqlQueryRequest();
+        esqlRequest.esqlVersion("whatever");
+        FakeRestRequest restRequest = new FakeRestRequest();
+        Supplier<String> version = randomFrom(
+            () -> "es=8.1" + between(0, 3), // Versions we would rewrite.
+            () -> "es=8.1" + between(4, 9), // We wouldn't rewrite these anyway, but let's try it sometimes.
+            () -> "es=8." + between(0, 9) + between(0, 9), // These will rarely spit out versions we would rewrite. Either is fine.
+            () -> "es=" + between(0, 9) + "." + between(0, 9) + between(0, 9)
+        );
+        restRequest.getHttpRequest().getHeaders().put(CLIENT_META, List.of("es=8.13.0"));
+        defaultVersionForOldClients(esqlRequest, restRequest);
+        assertThat(esqlRequest.esqlVersion(), equalTo("whatever"));
+    }
+
+    public void testNoVersionForNewClient() {
+        Supplier<String> version = randomFrom(
+            () -> "es=8.14",
+            () -> "es=8.2" + between(0, 9),
+            () -> "es=8." + between(3, 9) + between(0, 9),
+            () -> "es=9." + between(0, 9) + between(0, 9),
+            () -> "es=" + between(0, 9) + between(0, 9) + "." + between(0, 9) + between(0, 9)
+        );
+        assertEsqlVersion(version.get(), randomProduct(), nullValue(String.class));
+    }
+
+    public void testAddsVersionForPython813() {
+        assertAddsOldest(
+            randomFrom(
+                "es=8.13.0,py=3.11.8,t=8.13.0,ur=2.2.1", // This is what the python client sent for me on 2024-4-12
+                "py=3.11.8,es=8.13.0,ur=2.2.1,t=8.13.0", // This is just a jumbled version of the above
+                "es=8.13" // This is all we need to trigger it
+            ),
+            randomProduct()
+        );
+    }
+
+    public void testAddsVersionForPython812() {
+        assertAddsOldest(
+            randomFrom(
+                "es=8.12.0,py=3.11.8,t=8.13.0,ur=2.2.1", // This is what the python client sent for me on 2024-4-12
+                "py=3.11.8,t=8.13.0,es=8.12.0,ur=2.2.1", // This is just a jumbled version of the above
+                "es=8.12" // This is all we need to trigger it
+            ),
+            randomProduct()
+        );
+    }
+
+    public void testNoVersionForKibana814() {
+        assertEsqlVersion("es=8.13", "kibana", nullValue(String.class));
+    }
+
+    public void testAddsVersionForKibana813() {
+        assertAddsOldest(
+            randomFrom(
+                "es=8.9.1p,js=20.12.2,t=8.3.3,hc=20.12.2", // This is what kibana sent on 2024-4-12
+                "js=20.12.2,es=8.9.1p,t=8.3.3,hc=20.12.2", // This is just a jumbled version of the above
+                "es=8.9" // This is all we need to trigger it
+            ),
+            "kibana"
+        );
+    }
+
+    public void testAddsVersionForKibana812() {
+        assertAddsOldest(
+            randomFrom(
+                "es=8.9.1p,js=18.19.1,t=8.3.3,hc=18.19.1", // This is what kibana sent on 2024-4-12
+                "js=18.19.1,t=8.3.3,es=8.9.1p,hc=18.19.1", // This is just a jumbled version of the above
+                "es=8.9" // This is all we need to trigger it
+            ),
+            "kibana"
+        );
+    }
+
+    public void testAddsVersionForKibana811() {
+        assertAddsOldest(
+            randomFrom(
+                "es=8.9.1p,js=18.18.2,t=8.3.3,hc=18.18.2", // This is what kibana sent on 2024-4-12
+                "js=18.18.2,es=8.9.1p,t=8.3.3,hc=18.18.2", // This is just a jumbled version of the above
+                "es=8.9" // This is all we need to trigger it
+            ),
+            "kibana"
+        );
+    }
+
+    private void assertAddsOldest(String clientMeta, String elasticProductOrigin) {
+        assertEsqlVersion(clientMeta, elasticProductOrigin, equalTo(EsqlVersion.ROCKET.versionStringWithoutEmoji()));
+    }
+
+    private void assertEsqlVersion(String clientMeta, String elasticProductOrigin, Matcher<String> expectedEsqlVersion) {
+        EsqlQueryRequest esqlRequest = new EsqlQueryRequest();
+        FakeRestRequest restRequest = new FakeRestRequest();
+        if (clientMeta != null) {
+            restRequest.getHttpRequest().getHeaders().put(CLIENT_META, List.of(clientMeta));
+        }
+        if (elasticProductOrigin != null) {
+            restRequest.getHttpRequest().getHeaders().put(PRODUCT_ORIGIN, List.of(elasticProductOrigin));
+        }
+        defaultVersionForOldClients(esqlRequest, restRequest);
+        assertThat(esqlRequest.esqlVersion(), expectedEsqlVersion);
+    }
+
+    /**
+     * Returns {@code null} or a random string that isn't {@code kibana}.
+     */
+    private String randomProduct() {
+        return randomBoolean() ? null : randomAlphaOfLength(3);
+    }
+}

From 212e20304ebbd7a45fa8f726c91c9436018bac29 Mon Sep 17 00:00:00 2001
From: Philipp Kahr
Date: Mon, 15 Apr 2024 16:41:46 +0200
Subject: [PATCH 018/130] Update mapping for shard_stats.total_count (#107471)

* Update monitoring-es-mb.json

* Incrementing stack monitoring version
---
 .../src/main/resources/monitoring-es-mb.json             | 7 +++++++
 .../xpack/monitoring/MonitoringTemplateRegistry.java     | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json
index 233c170890d40..315d6904deb93 100644
--- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json
+++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json
@@ -2363,6 +2363,13 @@
       "properties": {
         "indices": {
           "properties": {
+            "shards_stats": {
+              "properties": {
+                "total_count": {
+                  "type": "long"
+                }
+              }
+            },
             "shards": {
               "properties": {
                 "total": {
diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
index 4477057b2399b..277a395471cb5 100644
--- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
+++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java
@@ -77,7 +77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry {
      * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring
      * mappings that point to the corresponding ECS fields.
      */
-    public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 15;
+    public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 16;
     private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version";
     private static final String STACK_TEMPLATE_VERSION = "8";
     private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version";

From 71b276da904a46df9419d9603b10353c38c98930 Mon Sep 17 00:00:00 2001
From: Alexander Spies
Date: Mon, 15 Apr 2024 16:50:00 +0200
Subject: [PATCH 019/130] ESQL: Add version to docs (#107225)

Docs for https://github.com/elastic/elasticsearch/pull/106824.

Does not cover the
[REST API specs](https://github.com/elastic/elasticsearch/blob/main/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json)
as these don't cover the request body.
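For reference, the request shape every updated page now documents looks like this
(the query itself is illustrative; the `version` value is the one released form
used throughout the diff):

```
POST /_query
{
  "query": "FROM library | SORT page_count DESC | LIMIT 5",
  "version": "2024.04.01"
}
```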
--- .../esql/esql-async-query-api.asciidoc | 3 +- docs/reference/esql/esql-query-api.asciidoc | 9 +++- docs/reference/esql/esql-rest.asciidoc | 24 ++++++--- docs/reference/esql/esql-using.asciidoc | 4 ++ docs/reference/esql/esql-version.asciidoc | 49 +++++++++++++++++++ .../esql/multivalued-fields.asciidoc | 18 ++++--- 6 files changed, 90 insertions(+), 17 deletions(-) create mode 100644 docs/reference/esql/esql-version.asciidoc diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 6cd23fc524f96..82e7bb3cea9a5 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -24,7 +24,8 @@ POST /_query/async | SORT year | LIMIT 5 """, - "wait_for_completion_timeout": "2s" + "wait_for_completion_timeout": "2s", + "version": "2024.04.01" } ---- // TEST[setup:library] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index d7fa25a5a8d4f..e5e0e9fda12ec 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -16,7 +16,8 @@ POST /_query | STATS MAX(page_count) BY year | SORT year | LIMIT 5 - """ + """, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -76,7 +77,11 @@ For syntax, refer to <>. <>. `query`:: -(Required, object) {esql} query to run. For syntax, refer to <>. +(Required, string) {esql} query to run. For syntax, refer to <>. + +`version`:: +(Required, string) {esql} language version. Can be sent in short or long form, e.g. +`2024.04.01` or `2024.04.01.🚀`. See <> for details. [discrete] [role="child_attributes"] diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index de2b6dedd8776..106dba0e85dfe 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -16,7 +16,8 @@ The <> accepts an {esql} query string in the ---- POST /_query?format=txt { - "query": "FROM library | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5" + "query": "FROM library | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5", + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -55,7 +56,8 @@ POST /_query?format=txt | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5 - """ + """, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -143,7 +145,8 @@ POST /_query?format=txt "lte": 200 } } - } + }, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -179,7 +182,8 @@ POST /_query?format=json | SORT page_count DESC | LIMIT 5 """, - "columnar": true + "columnar": true, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -226,7 +230,8 @@ POST /_query | EVAL birth_date = date_parse(birth_date_string) | EVAL month_of_birth = DATE_FORMAT("MMMM",birth_date) | LIMIT 5 - """ + """, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -249,7 +254,8 @@ POST /_query | STATS count = COUNT(*) by year | WHERE count > 0 | LIMIT 5 - """ + """, + "version": "2024.04.01" } ---- // TEST[setup:library] @@ -270,7 +276,8 @@ POST /_query | WHERE count > ? 
   | LIMIT 5
   """,
-  "params": [300, "Frank Herbert", 0]
+  "params": [300, "Frank Herbert", 0],
+  "version": "2024.04.01"
 }
----
// TEST[setup:library]
@@ -304,7 +311,8 @@ POST /_query/async
   | SORT year
   | LIMIT 5
   """,
-  "wait_for_completion_timeout": "2s"
+  "wait_for_completion_timeout": "2s",
+  "version": "2024.04.01"
 }
----
// TEST[setup:library]
diff --git a/docs/reference/esql/esql-using.asciidoc b/docs/reference/esql/esql-using.asciidoc
index 3e045163069ec..d45a7f1743d23 100644
--- a/docs/reference/esql/esql-using.asciidoc
+++ b/docs/reference/esql/esql-using.asciidoc
@@ -18,8 +18,12 @@ Using {esql} to query across multiple clusters.
 <<esql-task-management>>::
 Using the <<tasks,task management API>> to list and cancel {esql} queries.

+<<esql-version>>::
+Information about {esql} language versions.
+
 include::esql-rest.asciidoc[]
 include::esql-kibana.asciidoc[]
 include::esql-security-solution.asciidoc[]
 include::esql-across-clusters.asciidoc[]
 include::task-management.asciidoc[]
+include::esql-version.asciidoc[]
diff --git a/docs/reference/esql/esql-version.asciidoc b/docs/reference/esql/esql-version.asciidoc
new file mode 100644
index 0000000000000..16bf1f66e3166
--- /dev/null
+++ b/docs/reference/esql/esql-version.asciidoc
@@ -0,0 +1,49 @@
+[[esql-version]]
+=== {esql} language versions
+
+++++
+<titleabbrev>Language versions</titleabbrev>
+++++
+
+[discrete]
+[[esql-versions-released]]
+==== Released versions
+
+* Version `2024.04.01`
+
+[discrete]
+[[esql-versions-explanation]]
+==== How versions work
+
+{esql} language versions are independent of {es} versions.
+Versioning the language ensures that your queries will always
+remain valid, independent of new {es} and {esql} releases. And it lets us
+evolve ESQL as we learn more from people using it. We don't plan to make
+huge changes to it, but we know we've made mistakes and we don't want those
+to live forever.
+
+For instance, the following query will remain valid, even if a future
+version of {esql} introduces syntax changes or changes how the
+commands or functions it uses work.
+
+[source,console]
+----
+POST /_query?format=txt
+{
+  "version": "2024.04.01",
+  "query": """
+    FROM library
+    | EVAL release_month = DATE_TRUNC(1 month, release_date)
+    | KEEP release_month
+    | SORT release_month ASC
+    | LIMIT 3
+  """
+}
+----
+// TEST[setup:library]
+
+We won't make breaking changes to released {esql} versions, and
+versions will remain supported until they are deprecated.
+New features, bug fixes, and performance improvements
+will continue to be added to released {esql} versions,
+provided they do not involve breaking changes.
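The new page demonstrates only the short form of the version string. Per the query
API reference above, the long, emoji-suffixed form is accepted as well; a minimal
sketch (the query body is illustrative):

```
POST /_query
{
  "query": "FROM library | LIMIT 1",
  "version": "2024.04.01.🚀"
}
```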
diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 871a741d5ee24..35f46db25425b 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -17,7 +17,8 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2" + "query": "FROM mv | LIMIT 2", + "version": "2024.04.01" } ---- @@ -65,7 +66,8 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2" + "query": "FROM mv | LIMIT 2", + "version": "2024.04.01" } ---- @@ -106,7 +108,8 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2" + "query": "FROM mv | LIMIT 2", + "version": "2024.04.01" } ---- @@ -148,7 +151,8 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | EVAL b=TO_STRING(b) | LIMIT 2" + "query": "FROM mv | EVAL b=TO_STRING(b) | LIMIT 2", + "version": "2024.04.01" } ---- @@ -186,7 +190,8 @@ POST /mv/_bulk?refresh ---- POST /_query { - "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4" + "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4", + "version": "2024.04.01" } ---- // TEST[continued] @@ -225,7 +230,8 @@ Work around this limitation by converting the field to single value with one of: ---- POST /_query { - "query": "FROM mv | EVAL b=MV_MIN(b) | EVAL b + 2, a + b | LIMIT 4" + "query": "FROM mv | EVAL b=MV_MIN(b) | EVAL b + 2, a + b | LIMIT 4", + "version": "2024.04.01" } ---- // TEST[continued] From 22696627abb0e468b7b3389f60bbc409c20c792d Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 11:24:11 -0400 Subject: [PATCH 020/130] Muting (#107480) Muting https://github.com/elastic/elasticsearch/issues/105331 --- .../routing/allocation/decider/DiskThresholdDeciderIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index fff708bbddc1e..8499bc8aef4ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -161,6 +161,7 @@ public void testRestoreSnapshotAllocationDoesNotExceedWatermark() throws Excepti assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, contains(in(shardSizes.getSmallestShardIds()))); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105331") @TestIssueLogging( value = "org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceComputer:TRACE," + "org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceReconciler:DEBUG," From 28e8197f4b344081e7eb2a503410b4760e946c6d Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 15 Apr 2024 17:26:06 +0200 Subject: [PATCH 021/130] [Connector API] Fix bug with filtering validation toXContent (#107467) --- docs/changelog/107467.yaml | 5 +++++ .../application/connector/filtering/FilteringValidation.java | 1 + .../xpack/application/connector/ConnectorFilteringTests.java | 4 ++-- 3 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107467.yaml diff --git a/docs/changelog/107467.yaml b/docs/changelog/107467.yaml new file mode 100644 index 0000000000000..e775e5928770d --- /dev/null +++ b/docs/changelog/107467.yaml @@ -0,0 +1,5 @@ 
+pr: 107467 +summary: "[Connector API] Fix bug with filtering validation toXContent" +area: Application +type: bug +issues: [] diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java index bb2de688f6705..43c370e315558 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java @@ -69,6 +69,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.stringListField(IDS_FIELD.getPreferredName(), ids); builder.stringListField(MESSAGES_FIELD.getPreferredName(), messages); } + builder.endObject(); return builder; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java index 8c1cdcb418142..424624b52887c 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java @@ -203,8 +203,8 @@ public void testToXContent_WithAdvancedSnippetPopulatedWithAValueObject() throws } ], "validation": { - "errors": [], - "state": "valid" + "errors": [{"ids": ["1"], "messages": ["some messages"]}], + "state": "invalid" } }, "domain": "DEFAULT", From 676c89e6ed91e0b2756a22f43b2960b4e1a26a94 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 15 Apr 2024 17:26:21 +0200 Subject: [PATCH 022/130] [Connector API] Unify API JSON rest spec (#107476) --- ...lete.json => connector.secret_delete.json} | 2 +- ...ret.get.json => connector.secret_get.json} | 2 +- ...t.post.json => connector.secret_post.json} | 2 +- ...ret.put.json => connector.secret_put.json} | 2 +- ...el.json => connector.sync_job_cancel.json} | 2 +- ....json => connector.sync_job_check_in.json} | 2 +- ...te.json => connector.sync_job_delete.json} | 2 +- ...ror.json => connector.sync_job_error.json} | 2 +- ...b.get.json => connector.sync_job_get.json} | 2 +- ...list.json => connector.sync_job_list.json} | 2 +- ...post.json => connector.sync_job_post.json} | 2 +- ...n => connector.sync_job_update_stats.json} | 2 +- .../test/entsearch/320_connector_delete.yml | 16 +++--- .../entsearch/400_connector_sync_job_post.yml | 30 +++++----- .../410_connector_sync_job_delete.yml | 14 ++--- .../420_connector_sync_job_check_in.yml | 10 ++-- .../430_connector_sync_job_cancel.yml | 14 ++--- .../entsearch/440_connector_sync_job_get.yml | 14 ++--- .../450_connector_sync_job_error.yml | 6 +- .../460_connector_sync_job_update_stats.yml | 36 ++++++------ .../entsearch/470_connector_sync_job_list.yml | 56 +++++++++---------- .../entsearch/500_connector_secret_post.yml | 12 ++-- .../entsearch/505_connector_secret_put.yml | 16 +++--- .../entsearch/510_connector_secret_get.yml | 14 ++--- .../entsearch/520_connector_secret_delete.yml | 18 +++--- 25 files changed, 140 insertions(+), 140 deletions(-) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_secret.delete.json => connector.secret_delete.json} (94%) rename 
rest-api-spec/src/main/resources/rest-api-spec/api/{connector_secret.get.json => connector.secret_get.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_secret.post.json => connector.secret_post.json} (94%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_secret.put.json => connector.secret_put.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.cancel.json => connector.sync_job_cancel.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.check_in.json => connector.sync_job_check_in.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.delete.json => connector.sync_job_delete.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.error.json => connector.sync_job_error.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.get.json => connector.sync_job_get.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.list.json => connector.sync_job_list.json} (97%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.post.json => connector.sync_job_post.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{connector_sync_job.update_stats.json => connector.sync_job_update_stats.json} (96%) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json similarity index 94% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json index 511e925a12e1d..b93f7ae94d2c4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_delete.json @@ -1,5 +1,5 @@ { - "connector_secret.delete": { + "connector.secret_delete": { "documentation": { "url": null, "description": "Deletes a connector secret." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json index f1037bedddfc6..a4d6f34290727 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_get.json @@ -1,5 +1,5 @@ { - "connector_secret.get": { + "connector.secret_get": { "documentation": { "url": null, "description": "Retrieves a secret stored by Connectors." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json similarity index 94% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json index 48657cf389446..ca61b9165e3b4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_post.json @@ -1,5 +1,5 @@ { - "connector_secret.post": { + "connector.secret_post": { "documentation": { "url": null, "description": "Creates a secret for a Connector." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json index 63ad6ce6e2006..f99d3f6176f3f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.secret_put.json @@ -1,5 +1,5 @@ { - "connector_secret.put": { + "connector.secret_put": { "documentation": { "url": null, "description": "Creates or updates a secret for a Connector." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json index 1e8cf154cf652..4f0729a3a11ca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_cancel.json @@ -1,5 +1,5 @@ { - "connector_sync_job.cancel": { + "connector.sync_job_cancel": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/cancel-connector-sync-job-api.html", "description": "Cancels a connector sync job." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json index a6c96f506b115..10a1a40d616eb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_check_in.json @@ -1,5 +1,5 @@ { - "connector_sync_job.check_in": { + "connector.sync_job_check_in": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/check-in-connector-sync-job-api.html", "description": "Checks in a connector sync job (refreshes 'last_seen')." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json index 11894a48db576..591cb8f0cc695 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_delete.json @@ -1,5 +1,5 @@ { - "connector_sync_job.delete": { + "connector.sync_job_delete": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-connector-sync-job-api.html", "description": "Deletes a connector sync job." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json index c6fbd15559e2d..ea5c2a0dd0586 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_error.json @@ -1,5 +1,5 @@ { - "connector_sync_job.error": { + "connector.sync_job_error": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-error-api.html", "description": "Sets an error for a connector sync job." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json index 6dd29069badc4..8a46a1eecc6ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_get.json @@ -1,5 +1,5 @@ { - "connector_sync_job.get": { + "connector.sync_job_get": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-sync-job-api.html", "description": "Returns the details about a connector sync job." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json similarity index 97% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json index 6b5a43d167e8f..737028ce782f8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_list.json @@ -1,5 +1,5 @@ { - "connector_sync_job.list": { + "connector.sync_job_list": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-connector-sync-jobs-api.html", "description": "Lists all connector sync jobs." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json index 8050b34014d2c..88069718dbd20 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_post.json @@ -1,5 +1,5 @@ { - "connector_sync_job.post": { + "connector.sync_job_post": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-sync-job-api.html", "description": "Creates a connector sync job." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json index d5f18df0a74da..744de17a6ad00 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.sync_job_update_stats.json @@ -1,5 +1,5 @@ { - "connector_sync_job.update_stats": { + "connector.sync_job_update_stats": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-stats-api.html", "description": "Updates the stats fields in the connector sync job document." 
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml index 03eebb18ceefe..484c5264227f9 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml @@ -31,26 +31,26 @@ setup: "Delete Connector - deletes associated sync jobs": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector-to-delete job_type: full trigger_method: on_demand - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector-to-delete job_type: full trigger_method: on_demand - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector-to-delete job_type: full trigger_method: on_demand - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: test-connector-to-delete - match: { count: 3 } @@ -64,7 +64,7 @@ setup: - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: test-connector-to-delete - match: { count: 0 } @@ -74,14 +74,14 @@ setup: "Delete Connector - doesn't associated sync jobs when delete_sync_jobs is false": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector-to-delete job_type: full trigger_method: on_demand - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: test-connector-to-delete - match: { count: 1 } @@ -95,7 +95,7 @@ setup: - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: test-connector-to-delete - match: { count: 1 } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml index cb8b8d44b7311..54d9fe78ebaee 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml @@ -23,7 +23,7 @@ setup: --- 'Create connector sync job': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -34,7 +34,7 @@ setup: - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { connector.id: test-connector } @@ -130,7 +130,7 @@ setup: - match: { result: updated } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -141,7 +141,7 @@ setup: - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { connector.filtering.rules.0.id: RULE-ACTIVE-SYNC-JOB-TEST } @@ -191,7 +191,7 @@ setup: - match: { result: updated } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -202,7 +202,7 @@ setup: - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { connector.id: test-connector } @@ -213,7 +213,7 @@ setup: 'Create connector sync job with missing job type - expect job type full as default': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: 
test-connector trigger_method: on_demand @@ -223,7 +223,7 @@ setup: - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { connector.id: test-connector } @@ -241,7 +241,7 @@ setup: --- 'Create connector sync job with missing trigger method - expect trigger method on_demand as default': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -251,7 +251,7 @@ setup: - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { connector.id: test-connector } @@ -269,7 +269,7 @@ setup: --- 'Create connector sync job with non-existing connector id': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: non-existing-id job_type: full @@ -279,7 +279,7 @@ setup: --- 'Create connector sync job with invalid job type': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: invalid_job_type @@ -289,7 +289,7 @@ setup: --- 'Create connector sync job with invalid trigger method': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -300,7 +300,7 @@ setup: --- 'Create connector sync job with no index attached': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector-detached-index job_type: full @@ -315,7 +315,7 @@ setup: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # entsearch-user with read-only access - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml index 3b5a65ee3aa30..5e8ab2be9436f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml @@ -15,7 +15,7 @@ setup: --- "Delete Connector Sync Job": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -24,20 +24,20 @@ setup: - set: { id: sync-job-id-to-delete } - do: - connector_sync_job.delete: + connector.sync_job_delete: connector_sync_job_id: $sync-job-id-to-delete - match: { acknowledged: true } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id-to-delete catch: missing --- "Delete Connector Sync Job - Connector Sync Job does not exist": - do: - connector_sync_job.delete: + connector.sync_job_delete: connector_sync_job_id: test-nonexistent-connector-sync-job-id catch: missing @@ -47,7 +47,7 @@ setup: features: headers - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -58,10 +58,10 @@ setup: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # entsearch-user with read-only access - connector_sync_job.delete: + connector.sync_job_delete: connector_sync_job_id: $sync-job-id-to-delete - do: - connector_sync_job.delete: + connector.sync_job_delete: connector_sync_job_id: $sync-job-id-to-delete diff --git 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml index 3cada2facf689..2e0223437a836 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml @@ -16,7 +16,7 @@ setup: --- "Check in a Connector Sync Job": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -25,19 +25,19 @@ setup: - set: { id: sync-job-id-to-check-in } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id-to-check-in - set: { last_seen: last_seen_before_check_in } - do: - connector_sync_job.check_in: + connector.sync_job_check_in: connector_sync_job_id: $sync-job-id-to-check-in - match: { result: updated } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id-to-check-in - is_after: { last_seen: $last_seen_before_check_in } @@ -45,6 +45,6 @@ setup: --- "Check in a Connector Sync Job - Connector Sync Job does not exist": - do: - connector_sync_job.check_in: + connector.sync_job_check_in: connector_sync_job_id: test-nonexistent-connector-sync-job-id catch: missing diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml index b7c1560df751d..b28f1bd82d221 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml @@ -15,7 +15,7 @@ setup: --- "Cancel a pending Connector Sync Job - transition to canceled directly": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -24,13 +24,13 @@ setup: - set: { id: sync-job-id-to-cancel } - do: - connector_sync_job.cancel: + connector.sync_job_cancel: connector_sync_job_id: $sync-job-id-to-cancel - match: { result: updated } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id-to-cancel - set: { cancelation_requested_at: cancelation_requested_at } @@ -42,7 +42,7 @@ setup: --- "Cancel a canceled Connector Sync Job - invalid state transition from canceled to canceling": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -51,17 +51,17 @@ setup: - set: { id: sync-job-id-to-cancel } - do: - connector_sync_job.cancel: + connector.sync_job_cancel: connector_sync_job_id: $sync-job-id-to-cancel - do: catch: bad_request - connector_sync_job.cancel: + connector.sync_job_cancel: connector_sync_job_id: $sync-job-id-to-cancel --- "Cancel a Connector Sync Job - Connector Sync Job does not exist": - do: - connector_sync_job.check_in: + connector.sync_job_check_in: connector_sync_job_id: test-nonexistent-connector-sync-job-id catch: missing diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml index e42106ee1fa58..c41335adff595 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml @@ -15,7 +15,7 @@ setup: --- 'Get connector sync job': - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: access_control @@ -23,7 +23,7 @@ setup: - set: { id: id } - match: { id: $id } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { job_type: access_control } - match: { trigger_method: scheduled } @@ -31,7 +31,7 @@ setup: --- 'Get connector sync job - Missing sync job id': - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: non-existing-sync-job-id catch: missing @@ -42,7 +42,7 @@ setup: features: headers - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -52,7 +52,7 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # entsearch-user with read-only access - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id - match: { job_type: full } @@ -66,7 +66,7 @@ setup: features: headers - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -77,6 +77,6 @@ setup: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXBhc3N3b3Jk" } # entsearch-unprivileged user - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $sync-job-id diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml index 08751573bdd86..c8bfd606fb2ef 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml @@ -15,7 +15,7 @@ setup: --- "Set an error for a pending connector sync job - invalid state transition from pending to error": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -25,7 +25,7 @@ setup: - do: catch: bad_request - connector_sync_job.error: + connector.sync_job_error: connector_sync_job_id: $id body: error: error @@ -33,7 +33,7 @@ setup: --- "Set an error for a Connector Sync Job - Connector Sync Job does not exist": - do: - connector_sync_job.error: + connector.sync_job_error: connector_sync_job_id: test-nonexistent-connector-sync-job-id body: error: error diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml index 3353e7a8dec25..85156bf800582 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml @@ -16,7 +16,7 @@ setup: --- "Update the ingestion stats for a connector sync job - only mandatory parameters": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -25,7 +25,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -35,7 +35,7 @@ setup: - match: { result: updated } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { deleted_document_count: 10 } @@ -45,7 +45,7 @@ setup: --- "Update the ingestion stats for a connector sync job - negative deleted document count error": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -54,7 +54,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: -10 @@ -65,7 +65,7 @@ setup: --- "Update the ingestion stats for a connector sync job - negative indexed document count error": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -74,7 +74,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -85,7 +85,7 @@ setup: --- "Update the ingestion stats for a connector sync job - negative indexed document volume error": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -94,7 +94,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -105,7 +105,7 @@ setup: --- "Update the ingestion stats for a connector sync job - negative optional total document count error": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -114,7 +114,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -126,7 +126,7 @@ setup: --- "Update the ingestion stats for a connector sync job - with optional total_document_count": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -135,7 +135,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -146,7 +146,7 @@ setup: - match: { result: updated } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { deleted_document_count: 10 } @@ -157,7 +157,7 @@ setup: --- "Update the ingestion stats for a connector sync job - with optional last_seen": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: test-connector job_type: full @@ -165,7 +165,7 @@ setup: - set: { id: id } - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: $id body: deleted_document_count: 10 @@ -176,7 +176,7 @@ setup: - match: { result: updated } - do: - connector_sync_job.get: + connector.sync_job_get: connector_sync_job_id: $id - match: { deleted_document_count: 10 } @@ -187,7 +187,7 @@ setup: --- "Update the ingestion stats for a 
Connector Sync Job - Connector Sync Job does not exist": - do: - connector_sync_job.update_stats: + connector.sync_job_update_stats: connector_sync_job_id: test-nonexistent-connector-sync-job-id body: deleted_document_count: 10 diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml index 2d4ec3271413d..1acd4136cae0b 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml @@ -25,28 +25,28 @@ setup: --- "List Connector Sync Jobs": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-three-id } - do: - connector_sync_job.list: { } + connector.sync_job_list: { } - match: { count: 3 } @@ -58,28 +58,28 @@ setup: --- "List Connector Sync Jobs - with from": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-three-id } - do: - connector_sync_job.list: + connector.sync_job_list: from: 1 - match: { count: 3 } @@ -91,28 +91,28 @@ setup: --- "List Connector Sync Jobs - with size": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-three-id } - do: - connector_sync_job.list: + connector.sync_job_list: size: 1 - match: { count: 3 } @@ -123,24 +123,24 @@ setup: --- "List Connector Sync Jobs - Get pending jobs": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.cancel: + connector.sync_job_cancel: connector_sync_job_id: $sync-job-two-id - do: - connector_sync_job.list: + connector.sync_job_list: status: pending - match: { count: 1 } - match: { results.0.id: $sync-job-one-id } @@ -148,21 +148,21 @@ setup: --- "List Connector Sync Jobs - Get jobs for connector one": - do: - connector_sync_job.post: + 
connector.sync_job_post: body: id: connector-one job_type: access_control trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-two job_type: access_control trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: connector-one - match: { count: 1 } - match: { results.0.id: $sync-job-one-id } @@ -171,27 +171,27 @@ setup: "List Connector Sync Jobs - with invalid job status": - do: catch: bad_request - connector_sync_job.list: + connector.sync_job_list: status: invalid_job_status --- "List Connector Sync Jobs - Get jobs with single job type": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: full trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: incremental trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: connector-one job_type: full - match: { count: 1 } @@ -200,21 +200,21 @@ setup: --- "List Connector Sync Jobs - Get jobs with multiple job types": - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: full trigger_method: scheduled - set: { id: sync-job-one-id } - do: - connector_sync_job.post: + connector.sync_job_post: body: id: connector-one job_type: incremental trigger_method: scheduled - set: { id: sync-job-two-id } - do: - connector_sync_job.list: + connector.sync_job_list: connector_id: connector-one job_type: full,incremental @@ -227,12 +227,12 @@ setup: "List Connector Sync Jobs - with invalid job type": - do: catch: bad_request - connector_sync_job.list: + connector.sync_job_list: job_type: invalid_job_type,incremental --- "List Connector Sync Jobs - empty list": - do: - connector_sync_job.list: { } + connector.sync_job_list: { } - match: { count: 0 } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml index ed0f7b9309e08..04015448e9e39 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/500_connector_secret_post.yml @@ -6,13 +6,13 @@ setup: --- 'Post connector secret - admin': - do: - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: - connector_secret.get: + connector.secret_get: id: $id - match: { value: my-secret } @@ -23,14 +23,14 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.get: + connector.secret_get: id: $id - match: { value: my-secret } @@ -41,7 +41,7 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged - connector_secret.post: + connector.secret_post: body: value: my-secret catch: unauthorized @@ -49,7 +49,7 @@ setup: --- 'Post connector secret 
when id is missing should fail': - do: - connector_secret.post: + connector.secret_post: body: value: null catch: bad_request diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml index 54edeb0ab10a0..33efacd089a19 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml @@ -6,26 +6,26 @@ setup: --- 'Put connector secret - admin': - do: - connector_secret.put: + connector.secret_put: id: test-secret body: value: my-secret - match: { result: 'created' } - do: - connector_secret.get: + connector.secret_get: id: test-secret - match: { value: my-secret } - do: - connector_secret.put: + connector.secret_put: id: test-secret body: value: my-secret-2 - match: { result: 'updated' } - do: - connector_secret.get: + connector.secret_get: id: test-secret - match: { value: my-secret-2 } @@ -36,7 +36,7 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.put: + connector.secret_put: id: test-secret body: value: my-secret @@ -44,7 +44,7 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.get: + connector.secret_get: id: test-secret - match: { value: my-secret } @@ -55,7 +55,7 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged - connector_secret.put: + connector.secret_put: id: test-secret body: value: my-secret @@ -64,7 +64,7 @@ setup: --- 'Put connector secret when id is missing should fail': - do: - connector_secret.put: + connector.secret_put: id: test-secret body: value: null diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml index 62b1b4176dc4d..659dadf68710e 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/510_connector_secret_get.yml @@ -6,13 +6,13 @@ setup: --- 'Get connector secret - admin': - do: - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: - connector_secret.get: + connector.secret_get: id: $id - match: { value: my-secret } @@ -23,14 +23,14 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.get: + connector.secret_get: id: $id - match: { value: my-secret } @@ -41,20 +41,20 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: headers: { Authorization: "Basic 
ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged - connector_secret.get: + connector.secret_get: id: $id catch: unauthorized --- 'Get connector secret - Secret does not exist': - do: - connector_secret.get: + connector.secret_get: id: non-existing-secret-id catch: missing diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml index 04920673d6ad3..4c9baa23a5a2e 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/520_connector_secret_delete.yml @@ -6,19 +6,19 @@ setup: --- 'Delete connector secret - admin': - do: - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: - connector_secret.delete: + connector.secret_delete: id: $id - match: { deleted: true } - do: - connector_secret.get: + connector.secret_get: id: $id catch: missing @@ -29,19 +29,19 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.delete: + connector.secret_delete: id: $id - match: { deleted: true } - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.get: + connector.secret_get: id: $id catch: missing @@ -52,20 +52,20 @@ setup: - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - connector_secret.post: + connector.secret_post: body: value: my-secret - set: { id: id } - match: { id: $id } - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged - connector_secret.delete: + connector.secret_delete: id: $id catch: unauthorized --- 'Delete connector secret - Secret does not exist': - do: - connector_secret.delete: + connector.secret_delete: id: non-existing-secret-id catch: missing From 30be6c8f843338e76873e5aeed5ebc024d73edfb Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 15 Apr 2024 17:26:47 +0200 Subject: [PATCH 023/130] [Connector API] Add update filtering validation and activate draft endpoints (#107457) --- .../connector.update_active_filtering.json | 34 ++++ ...connector.update_filtering_validation.json | 38 +++++ .../332_connector_update_filtering.yml | 146 ++++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 13 ++ .../connector/ConnectorIndexService.java | 106 +++++++++++++ ...tUpdateConnectorActiveFilteringAction.java | 46 ++++++ ...ateConnectorFilteringValidationAction.java | 48 ++++++ ...tUpdateConnectorActiveFilteringAction.java | 55 +++++++ ...ateConnectorFilteringValidationAction.java | 56 +++++++ .../UpdateConnectorActiveFilteringAction.java | 85 ++++++++++ ...ateConnectorFilteringValidationAction.java | 141 +++++++++++++++++ .../connector/filtering/FilteringRules.java | 1 + .../filtering/FilteringValidationInfo.java | 4 + .../connector/ConnectorIndexServiceTests.java | 119 ++++++++++++++ .../connector/ConnectorTestUtils.java | 29 ++-- 
...eringActionRequestBWCSerializingTests.java | 51 ++++++ ...ationActionRequestBWCSerializingTests.java | 52 +++++++ .../xpack/security/operator/Constants.java | 2 + 18 files changed, 1016 insertions(+), 10 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorActiveFilteringAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringValidationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorActiveFilteringAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringValidationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationActionRequestBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json new file mode 100644 index 0000000000000..343791e7ac11f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_active_filtering.json @@ -0,0 +1,34 @@ +{ + "connector.update_active_filtering": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", + "description": "Activates the draft filtering rules if they are in a validated state." + }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_filtering/_activate", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json new file mode 100644 index 0000000000000..99b0a9e6116c1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering_validation.json @@ -0,0 +1,38 @@ +{ + "connector.update_filtering_validation": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", + "description": "Updates the validation info of the draft filtering rules." 
+ }, + "stability": "experimental", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_filtering/_validation", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "Validation info for the draft filtering rules", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml index 5734fdfe67ce8..0d52aa5d38555 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml @@ -246,3 +246,149 @@ setup: advanced_snippet: updated_at: "wrong datetime" value: { } + + +--- +"Update Connector Filtering - Update filtering draft validation": + - do: + connector.update_filtering_validation: + connector_id: test-connector + body: + validation: + state: invalid + errors: + - ids: ["1", "2"] + messages: ["some error 1", "some error 2"] + - ids: [ "3", "4" ] + messages: [ "some error 3", "some error 4" ] + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { filtering.0.draft.validation.state: invalid } + - match: { filtering.0.draft.validation.errors.0.ids.0: "1"} + - match: { filtering.0.draft.validation.errors.0.messages.1: "some error 2"} + - match: { filtering.0.draft.validation.errors.1.ids.0: "3"} + - match: { filtering.0.draft.validation.errors.1.messages.1: "some error 4"} + + +--- +"Update Connector Filtering - Update validation missing validation state": + - do: + catch: "bad_request" + connector.update_filtering_validation: + connector_id: test-connector + body: + validation: + errors: [] + +--- +"Update Connector Filtering - Update validation missing validation errors": + - do: + catch: "bad_request" + connector.update_filtering_validation: + connector_id: test-connector + body: + validation: + state: "valid" + + + +--- +"Update Connector Filtering - Activate valid draft": + + - do: + connector.update_filtering: + connector_id: test-connector + body: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: + - tables: + - some_table + query: 'SELECT id, st_geohash(coordinates) FROM my_db.some_table;' + rules: + - created_at: "2023-06-25T12:30:00.000Z" + field: _ + id: DEFAULT + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + + - match: { result: updated } + + - do: + connector.update_filtering_validation: + connector_id: test-connector + body: + validation: + state: valid + errors: [] + + - match: { result: updated } + + + - do: + connector.update_active_filtering: + connector_id: test-connector + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { filtering.0.draft.advanced_snippet.created_at: "2023-05-25T12:30:00.000Z" } + - match: { filtering.0.draft.advanced_snippet.value.0.tables.0.: "some_table" } + - match: { filtering.0.draft.rules.0.created_at: 
"2023-06-25T12:30:00.000Z" } + - match: { filtering.0.active.advanced_snippet.created_at: "2023-05-25T12:30:00.000Z" } + - match: { filtering.0.active.advanced_snippet.value.0.tables.0.: "some_table" } + - match: { filtering.0.active.rules.0.created_at: "2023-06-25T12:30:00.000Z" } + +--- +"Update Connector Filtering - Activate invalid draft": + + - do: + connector.update_filtering_validation: + connector_id: test-connector + body: + validation: + state: invalid + errors: [] + + - match: { result: updated } + + - do: + catch: "bad_request" + connector.update_active_filtering: + connector_id: test-connector + + +--- +"Update Connector Filtering - Activate edited draft": + + - do: + connector.update_filtering: + connector_id: test-connector + body: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: + - tables: + - some_table + query: 'SELECT id, st_geohash(coordinates) FROM my_db.some_table;' + + - match: { result: updated } + + + - do: + catch: "bad_request" + connector.update_active_filtering: + connector_id: test-connector diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 322c8994f4314..bc3da1a82fba4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -54,10 +54,12 @@ import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorActiveFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorIndexNameAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; @@ -72,10 +74,12 @@ import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorActiveFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; +import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorIndexNameAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; @@ -85,10 +89,12 @@ import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorServiceTypeAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorStatusAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorActiveFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringValidationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorIndexNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -262,6 +268,11 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), + new ActionHandler<>(UpdateConnectorActiveFilteringAction.INSTANCE, TransportUpdateConnectorActiveFilteringAction.class), + new ActionHandler<>( + UpdateConnectorFilteringValidationAction.INSTANCE, + TransportUpdateConnectorFilteringValidationAction.class + ), new ActionHandler<>(UpdateConnectorIndexNameAction.INSTANCE, TransportUpdateConnectorIndexNameAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), @@ -356,6 +367,8 @@ public List getRestHandlers( new RestUpdateConnectorApiKeyIdAction(), new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), + new RestUpdateConnectorActiveFilteringAction(), + new RestUpdateConnectorFilteringValidationAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorIndexNameAction(), new RestUpdateConnectorLastSeenAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 20b9a8ec74027..99240d6b6d49d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -58,6 +58,7 @@ import 
org.elasticsearch.xpack.application.connector.filtering.FilteringRule;
 import org.elasticsearch.xpack.application.connector.filtering.FilteringRules;
 import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo;
+import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState;
 import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob;
 import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService;
 
@@ -644,6 +645,111 @@ public void updateConnectorFilteringDraft(
         }
     }
 
+    /**
+     * Updates the {@link FilteringValidationInfo} of the draft {@link ConnectorFiltering} property of a {@link Connector}.
+     * @param connectorId The unique identifier of the {@link Connector} whose draft validation info is updated.
+     * @param validation  The new validation info for the draft filtering rules.
+     * @param listener    Listener to respond to a successful response or an error.
+     */
+    public void updateConnectorDraftFilteringValidation(
+        String connectorId,
+        FilteringValidationInfo validation,
+        ActionListener<UpdateResponse> listener
+    ) {
+        getConnector(connectorId, listener.delegateFailure((l, connector) -> {
+            try {
+                List<ConnectorFiltering> connectorFilteringList = fromXContentBytesConnectorFiltering(
+                    connector.getSourceRef(),
+                    XContentType.JSON
+                );
+                // Connectors represent their filtering configuration as a singleton list
+                ConnectorFiltering connectorFilteringSingleton = connectorFilteringList.get(0);
+
+                ConnectorFiltering updatedConnectorFiltering = connectorFilteringSingleton.setDraft(
+                    new FilteringRules.Builder().setRules(connectorFilteringSingleton.getDraft().getRules())
+                        .setAdvancedSnippet(connectorFilteringSingleton.getDraft().getAdvancedSnippet())
+                        .setFilteringValidationInfo(validation)
+                        .build()
+                );
+
+                final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc(
+                    new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX)
+                        .id(connectorId)
+                        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+                        .source(Map.of(Connector.FILTERING_FIELD.getPreferredName(), List.of(updatedConnectorFiltering)))
+                );
+
+                client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, l, (ll, updateResponse) -> {
+                    if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) {
+                        ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId)));
+                        return;
+                    }
+                    ll.onResponse(updateResponse);
+                }));
+            } catch (Exception e) {
+                l.onFailure(e);
+            }
+        }));
+
+    }
+
+    /**
+     * Activates the draft {@link ConnectorFiltering} property of a {@link Connector}.
+     *
+     * @param connectorId The unique identifier of the {@link Connector} whose draft filtering is activated.
+     * @param listener Listener to respond to a successful response or an error.
+     */
+    public void activateConnectorDraftFiltering(String connectorId, ActionListener<UpdateResponse> listener) {
+        getConnector(connectorId, listener.delegateFailure((l, connector) -> {
+            try {
+                List<ConnectorFiltering> connectorFilteringList = fromXContentBytesConnectorFiltering(
+                    connector.getSourceRef(),
+                    XContentType.JSON
+                );
+                // Connectors represent their filtering configuration as a singleton list
+                ConnectorFiltering connectorFilteringSingleton = connectorFilteringList.get(0);
+
+                FilteringValidationState currentValidationState = connectorFilteringSingleton.getDraft()
+                    .getFilteringValidationInfo()
+                    .getValidationState();
+
+                if (currentValidationState != FilteringValidationState.VALID) {
+                    throw new ElasticsearchStatusException(
+                        "Filtering draft needs to be validated by the connector service before activation. 
" + + "Current filtering draft validation state [" + + currentValidationState.toString() + + "] is not equal to [" + + FilteringValidationState.VALID + + "].", + RestStatus.BAD_REQUEST + ); + } + + ConnectorFiltering activatedConnectorFiltering = connectorFilteringSingleton.setActive( + connectorFilteringSingleton.getDraft() + ); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(Map.of(Connector.FILTERING_FIELD.getPreferredName(), List.of(activatedConnectorFiltering))) + ); + + client.update(updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, l, (ll, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); + return; + } + ll.onResponse(updateResponse); + })); + } catch (Exception e) { + l.onFailure(e); + } + })); + + } + /** * Updates the lastSeen property of a {@link Connector}. * diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorActiveFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorActiveFilteringAction.java new file mode 100644 index 0000000000000..fbf44487651cf --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorActiveFilteringAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateConnectorActiveFilteringAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_active_filtering_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_filtering/_activate")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorActiveFilteringAction.Request request = new UpdateConnectorActiveFilteringAction.Request( + restRequest.param("connector_id") + ); + return channel -> client.execute( + UpdateConnectorActiveFilteringAction.INSTANCE, + request, + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringValidationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringValidationAction.java new file mode 100644 index 0000000000000..32020eea4b8b9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringValidationAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +@ServerlessScope(Scope.PUBLIC) +public class RestUpdateConnectorFilteringValidationAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_filtering_validation_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_filtering/_validation")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorFilteringValidationAction.Request request = UpdateConnectorFilteringValidationAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorFilteringValidationAction.INSTANCE, + request, + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorActiveFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorActiveFilteringAction.java new file mode 100644 index 0000000000000..d0a0103f8f7de --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorActiveFilteringAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorActiveFilteringAction extends HandledTransportAction< + UpdateConnectorActiveFilteringAction.Request, + ConnectorUpdateActionResponse> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorActiveFilteringAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorActiveFilteringAction.NAME, + transportService, + actionFilters, + UpdateConnectorActiveFilteringAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorActiveFilteringAction.Request request, + ActionListener listener + ) { + connectorIndexService.activateConnectorDraftFiltering( + request.getConnectorId(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringValidationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringValidationAction.java new file mode 100644 index 0000000000000..6b960f5fefad6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringValidationAction.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorFilteringValidationAction extends HandledTransportAction< + UpdateConnectorFilteringValidationAction.Request, + ConnectorUpdateActionResponse> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorFilteringValidationAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorFilteringValidationAction.NAME, + transportService, + actionFilters, + UpdateConnectorFilteringValidationAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorFilteringValidationAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorDraftFilteringValidation( + request.getConnectorId(), + request.getValidation(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringAction.java new file mode 100644 index 0000000000000..7b4ce08ef8320 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringAction.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class UpdateConnectorActiveFilteringAction { + + public static final String NAME = "indices:data/write/xpack/connector/update_filtering/activate"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private UpdateConnectorActiveFilteringAction() {/* no instances */} + + public static class Request extends ConnectorActionRequest implements ToXContentObject { + + private final String connectorId; + + public Request(String connectorId) { + this.connectorId = connectorId; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + + } + + public String getConnectorId() { + return connectorId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationAction.java new file mode 100644 index 0000000000000..2164019c62ba3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationAction.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorFilteringValidationAction { + + public static final String NAME = "indices:data/write/xpack/connector/update_filtering/draft_validation"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private UpdateConnectorFilteringValidationAction() {/* no instances */} + + public static class Request extends ConnectorActionRequest implements ToXContentObject { + + private final String connectorId; + private final FilteringValidationInfo validation; + + public Request(String connectorId, FilteringValidationInfo validation) { + this.connectorId = connectorId; + this.validation = validation; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.validation = new FilteringValidationInfo(in); + } + + public String getConnectorId() { + return connectorId; + } + + public FilteringValidationInfo getValidation() { + return validation; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException); + } + + if (validation == null) { + validationException = addValidationError("[validation] cannot be [null].", validationException); + } + + return validationException; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_filtering_validation", + false, + ((args, connectorId) -> new UpdateConnectorFilteringValidationAction.Request( + connectorId, + (FilteringValidationInfo) args[0] + )) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> FilteringValidationInfo.fromXContent(p), FilteringRules.VALIDATION_FIELD); + } + + public static UpdateConnectorFilteringValidationAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorFilteringValidationAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to 
parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorFilteringValidationAction.Request fromXContent(XContentParser parser, String connectorId) + throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(FilteringRules.VALIDATION_FIELD.getPreferredName(), validation); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + validation.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(validation, request.validation); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, validation); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java index 35d18d23450b1..1fa2fbb96f0ea 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java @@ -71,6 +71,7 @@ public FilteringValidationInfo getFilteringValidationInfo() { public static final ParseField ADVANCED_SNIPPET_FIELD = new ParseField("advanced_snippet"); public static final ParseField RULES_FIELD = new ParseField("rules"); + public static final ParseField VALIDATION_FIELD = new ParseField("validation"); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java index cd197bf0538e4..0f1a11114fc55 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java @@ -53,6 +53,10 @@ public FilteringValidationInfo(StreamInput in) throws IOException { private static final ParseField ERRORS_FIELD = new ParseField("errors"); private static final ParseField STATE_FIELD = new ParseField("state"); + public FilteringValidationState getValidationState() { + return validationState; + } + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "filtering_validation_info", diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index ea510086fcf8c..0a49d4a41eba1 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -40,6 +40,7 @@ import 
org.elasticsearch.xpack.application.connector.filtering.FilteringAdvancedSnippet; import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.junit.Before; import java.util.ArrayList; @@ -290,6 +291,71 @@ public void testUpdateConnectorFiltering_updateDraft() throws Exception { ); } + public void testUpdateConnectorFilteringValidation() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + FilteringValidationInfo validationInfo = ConnectorTestUtils.getRandomFilteringValidationInfo(); + + DocWriteResponse validationInfoUpdateResponse = awaitUpdateConnectorDraftFilteringValidation(connectorId, validationInfo); + assertThat(validationInfoUpdateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + + assertThat(validationInfo, equalTo(indexedConnector.getFiltering().get(0).getDraft().getFilteringValidationInfo())); + } + + public void testActivateConnectorDraftFiltering_draftValid_shouldActivate() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + // Populate draft filtering + FilteringAdvancedSnippet advancedSnippet = ConnectorTestUtils.getRandomConnectorFiltering().getDraft().getAdvancedSnippet(); + List rules = ConnectorTestUtils.getRandomConnectorFiltering().getDraft().getRules(); + + DocWriteResponse updateResponse = awaitUpdateConnectorFilteringDraft(connectorId, advancedSnippet, rules); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + FilteringValidationInfo validationSuccess = new FilteringValidationInfo.Builder().setValidationState(FilteringValidationState.VALID) + .setValidationErrors(Collections.emptyList()) + .build(); + + DocWriteResponse validationInfoUpdateResponse = awaitUpdateConnectorDraftFilteringValidation(connectorId, validationSuccess); + assertThat(validationInfoUpdateResponse.status(), equalTo(RestStatus.OK)); + + DocWriteResponse activateFilteringResponse = awaitActivateConnectorDraftFiltering(connectorId); + assertThat(activateFilteringResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + + // Assert that draft is activated + assertThat(advancedSnippet, equalTo(indexedConnector.getFiltering().get(0).getActive().getAdvancedSnippet())); + assertThat(rules, equalTo(indexedConnector.getFiltering().get(0).getActive().getRules())); + } + + public void testActivateConnectorDraftFiltering_draftNotValid_expectFailure() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + DocWriteResponse resp = buildRequestAndAwaitPutConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + FilteringValidationInfo validationFailure = new FilteringValidationInfo.Builder().setValidationState( + FilteringValidationState.INVALID + 
).setValidationErrors(Collections.emptyList()).build(); + + DocWriteResponse validationInfoUpdateResponse = awaitUpdateConnectorDraftFilteringValidation(connectorId, validationFailure); + assertThat(validationInfoUpdateResponse.status(), equalTo(RestStatus.OK)); + + expectThrows(ElasticsearchStatusException.class, () -> awaitActivateConnectorDraftFiltering(connectorId)); + } + public void testUpdateConnectorLastSeen() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); @@ -775,6 +841,59 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorDraftFilteringValidation(String connectorId, FilteringValidationInfo validationInfo) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorDraftFilteringValidation(connectorId, validationInfo, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for update filtering validation request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update filtering validation request", resp.get()); + return resp.get(); + } + + private UpdateResponse awaitActivateConnectorDraftFiltering(String connectorId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.activateConnectorDraftFiltering(connectorId, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for activate draft filtering request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from activate draft filtering request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorFilteringDraft( String connectorId, FilteringAdvancedSnippet advancedSnippet, diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 876a1092a1d5b..7487dc2bb2c47 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; import org.elasticsearch.xpack.application.connector.filtering.FilteringRuleCondition; import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidation; import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; import 
org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; @@ -179,6 +180,22 @@ public static ConnectorCustomSchedule getRandomConnectorCustomSchedule() { .build(); } + public static FilteringValidationInfo getRandomFilteringValidationInfo() { + return new FilteringValidationInfo.Builder().setValidationErrors(getRandomFilteringValidationErrors()) + .setValidationState(getRandomFilteringValidationState()) + .build(); + } + + private static List getRandomFilteringValidationErrors() { + return List.of(getRandomFilteringValidationError(), getRandomFilteringValidationError(), getRandomFilteringValidationError()); + } + + private static FilteringValidation getRandomFilteringValidationError() { + return new FilteringValidation.Builder().setIds(List.of(randomAlphaOfLength(5), randomAlphaOfLength(5))) + .setMessages(List.of(randomAlphaOfLengthBetween(10, 20), randomAlphaOfLengthBetween(15, 25))) + .build(); + } + public static ConnectorFiltering getRandomConnectorFiltering() { Instant currentTimestamp = Instant.now(); @@ -203,11 +220,7 @@ public static ConnectorFiltering getRandomConnectorFiltering() { .build() ) ) - .setFilteringValidationInfo( - new FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) - .setValidationState(getRandomFilteringValidationState()) - .build() - ) + .setFilteringValidationInfo(getRandomFilteringValidationInfo()) .build() ) .setDraft( @@ -230,11 +243,7 @@ public static ConnectorFiltering getRandomConnectorFiltering() { .build() ) ) - .setFilteringValidationInfo( - new FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) - .setValidationState(getRandomFilteringValidationState()) - .build() - ) + .setFilteringValidationInfo(getRandomFilteringValidationInfo()) .build() ) .build(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..630cf019f34da --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorActiveFilteringActionRequestBWCSerializingTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorActiveFilteringActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorActiveFilteringAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorActiveFilteringAction.Request::new; + } + + @Override + protected UpdateConnectorActiveFilteringAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorActiveFilteringAction.Request(connectorId); + } + + @Override + protected UpdateConnectorActiveFilteringAction.Request mutateInstance(UpdateConnectorActiveFilteringAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorActiveFilteringAction.Request doParseInstance(XContentParser parser) throws IOException { + return new UpdateConnectorActiveFilteringAction.Request(this.connectorId); + } + + @Override + protected UpdateConnectorActiveFilteringAction.Request mutateInstanceForVersion( + UpdateConnectorActiveFilteringAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..c8a15b164790a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringValidationActionRequestBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorFilteringValidationActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorFilteringValidationAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorFilteringValidationAction.Request::new; + } + + @Override + protected UpdateConnectorFilteringValidationAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorFilteringValidationAction.Request(connectorId, ConnectorTestUtils.getRandomFilteringValidationInfo()); + } + + @Override + protected UpdateConnectorFilteringValidationAction.Request mutateInstance(UpdateConnectorFilteringValidationAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorFilteringValidationAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorFilteringValidationAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorFilteringValidationAction.Request mutateInstanceForVersion( + UpdateConnectorFilteringValidationAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 0f292d64bc4a6..126b05aec7f2f 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -134,6 +134,8 @@ public class Constants { "indices:data/write/xpack/connector/update_configuration", "indices:data/write/xpack/connector/update_error", "indices:data/write/xpack/connector/update_filtering", + "indices:data/write/xpack/connector/update_filtering/activate", + "indices:data/write/xpack/connector/update_filtering/draft_validation", "indices:data/write/xpack/connector/update_index_name", "indices:data/write/xpack/connector/update_last_seen", "indices:data/write/xpack/connector/update_last_sync_stats", From 426d1f6c0e531b7084993ec2e1a1e4dd8bd6646f Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 11:31:37 -0400 Subject: [PATCH 024/130] Muting (#107482) Muting https://github.com/elastic/elasticsearch/issues/107475 - DivTests - testEvaluateBlockWithNulls - testFold --- .../esql/expression/function/AbstractFunctionTestCase.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 0b6c64679dc1f..4cecec102799e 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -312,6 +312,7 @@ public final void testEvaluateBlockWithoutNulls() { * Evaluates a {@link Block} of values, all copied from the input pattern with * some null values inserted between. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107475") public final void testEvaluateBlockWithNulls() { testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } @@ -495,6 +496,7 @@ public final void testFactoryToString() { assertThat(factory.toString(), testCase.evaluatorToString()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107475") public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { From ae23a6f85a29840aa104a99d9fc5ef073e649d07 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 11:36:47 -0400 Subject: [PATCH 025/130] Muting (#107484) Muting https://github.com/elastic/elasticsearch/issues/103981 --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 5ca5da555718b..fc76ae2c67919 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -395,6 +395,7 @@ public void testILMWaitsForTimeSeriesEndTimeToLapse() throws Exception { }, 30, TimeUnit.SECONDS); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103981") @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/103981") public void testRollupNonTSIndex() throws Exception { createIndex(index, alias, false); From 635824e18679ea815840681eea9105502e0da1d4 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Mon, 15 Apr 2024 08:42:03 -0700 Subject: [PATCH 026/130] Add counters for downsampling (#107389) This PR adds counters for downsampling: success, failure and failure due to invalid configuration. They will be used in TSDB dashboard to assess health of downsampling functionality. 
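For context, the counters follow the same MeterRegistry pattern as the existing latency
histograms. A rough sketch distilled from the diff below (not standalone code;
`meterRegistry` and the "status" attribute key are taken from DownsampleMetrics):

    // registered once when the component starts
    meterRegistry.registerLongCounter("es.tsdb.downsample.actions.total",
        "Number of downsampling operations", "count");
    // bumped once per downsample operation, tagged with the outcome
    meterRegistry.getLongCounter("es.tsdb.downsample.actions.total")
        .incrementBy(1L, Map.of("status", "success"));

The shard-level counter (es.tsdb.downsample.actions.shard.total) is recorded the same
way, with missing_docs as an additional failure status.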
--- .../xpack/downsample/DownsampleMetrics.java | 10 ++++- .../downsample/DownsampleShardIndexer.java | 6 +-- .../downsample/TransportDownsampleAction.java | 44 +++++++++---------- .../DownsampleActionSingleNodeTests.java | 15 ++++++- 4 files changed, 47 insertions(+), 28 deletions(-) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java index 628191d22f50a..c950658b411ed 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -31,6 +31,8 @@ public class DownsampleMetrics extends AbstractLifecycleComponent { public static final String LATENCY_SHARD = "es.tsdb.downsample.latency.shard.histogram"; public static final String LATENCY_TOTAL = "es.tsdb.downsample.latency.total.histogram"; + public static final String ACTIONS_SHARD = "es.tsdb.downsample.actions.shard.total"; + public static final String ACTIONS = "es.tsdb.downsample.actions.total"; private final MeterRegistry meterRegistry; @@ -43,6 +45,8 @@ protected void doStart() { // Register all metrics to track. meterRegistry.registerLongHistogram(LATENCY_SHARD, "Downsampling action latency per shard", "ms"); meterRegistry.registerLongHistogram(LATENCY_TOTAL, "Downsampling latency end-to-end", "ms"); + meterRegistry.registerLongCounter(ACTIONS_SHARD, "Number of shard-level downsampling actions", "count"); + meterRegistry.registerLongCounter(ACTIONS, "Number of downsampling operations", "count"); } @Override @@ -71,11 +75,13 @@ String getMessage() { } } - void recordLatencyShard(long durationInMilliSeconds, ActionStatus status) { + void recordShardOperation(long durationInMilliSeconds, ActionStatus status) { meterRegistry.getLongHistogram(LATENCY_SHARD).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); + meterRegistry.getLongCounter(ACTIONS_SHARD).incrementBy(1L, Map.of(ActionStatus.NAME, status.getMessage())); } - void recordLatencyTotal(long durationInMilliSeconds, ActionStatus status) { + void recordOperation(long durationInMilliSeconds, ActionStatus status) { meterRegistry.getLongHistogram(LATENCY_TOTAL).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); + meterRegistry.getLongCounter(ACTIONS).incrementBy(1L, Map.of(ActionStatus.NAME, status.getMessage())); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 72d4b934ecdda..81aea221ebb4f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -191,7 +191,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumSent() + "]"; logger.info(error); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.MISSING_DOCS); + downsampleMetrics.recordShardOperation(duration.millis(), DownsampleMetrics.ActionStatus.MISSING_DOCS); throw new DownsampleShardIndexerException(error, false); } @@ -204,7 +204,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumFailed() + "]"; 
logger.info(error); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.FAILED); + downsampleMetrics.recordShardOperation(duration.millis(), DownsampleMetrics.ActionStatus.FAILED); throw new DownsampleShardIndexerException(error, false); } @@ -214,7 +214,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept ActionListener.noop() ); logger.info("Downsampling task [" + task.getPersistentTaskId() + " on shard " + indexShard.shardId() + " completed"); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.SUCCESS); + downsampleMetrics.recordShardOperation(duration.millis(), DownsampleMetrics.ActionStatus.SUCCESS); return new DownsampleIndexerAction.ShardDownsampleResponse(indexShard.shardId(), task.getNumIndexed()); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index cf794fe584a63..468b4331d34ab 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -179,20 +179,20 @@ public TransportDownsampleAction( this.downsampleMetrics = downsampleMetrics; } - private void recordLatencyOnSuccess(long startTime) { - recordLatency(startTime, DownsampleMetrics.ActionStatus.SUCCESS); + private void recordSuccessMetrics(long startTime) { + recordOperation(startTime, DownsampleMetrics.ActionStatus.SUCCESS); } - private void recordLatencyOnFailure(long startTime) { - recordLatency(startTime, DownsampleMetrics.ActionStatus.FAILED); + private void recordFailureMetrics(long startTime) { + recordOperation(startTime, DownsampleMetrics.ActionStatus.FAILED); } - private void recordLatencyOnInvalidConfiguration(long startTime) { - recordLatency(startTime, DownsampleMetrics.ActionStatus.INVALID_CONFIGURATION); + private void recordInvalidConfigurationMetrics(long startTime) { + recordOperation(startTime, DownsampleMetrics.ActionStatus.INVALID_CONFIGURATION); } - private void recordLatency(long startTime, DownsampleMetrics.ActionStatus status) { - downsampleMetrics.recordLatencyTotal( + private void recordOperation(long startTime, DownsampleMetrics.ActionStatus status) { + downsampleMetrics.recordOperation( TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), status ); @@ -215,7 +215,7 @@ protected void masterOperation( boolean hasDocumentLevelPermissions = indexPermissions.getDocumentPermissions().hasDocumentLevelPermissions(); boolean hasFieldLevelSecurity = indexPermissions.getFieldPermissions().hasFieldLevelSecurity(); if (hasDocumentLevelPermissions || hasFieldLevelSecurity) { - recordLatencyOnInvalidConfiguration(startTime); + recordInvalidConfigurationMetrics(startTime); listener.onFailure( new ElasticsearchException( "Rollup forbidden for index [" + sourceIndexName + "] with document level or field level security settings." 
@@ -228,14 +228,14 @@ protected void masterOperation( // Assert source index exists IndexMetadata sourceIndexMetadata = state.getMetadata().index(sourceIndexName); if (sourceIndexMetadata == null) { - recordLatencyOnInvalidConfiguration(startTime); + recordInvalidConfigurationMetrics(startTime); listener.onFailure(new IndexNotFoundException(sourceIndexName)); return; } // Assert source index is a time_series index if (IndexSettings.MODE.get(sourceIndexMetadata.getSettings()) != IndexMode.TIME_SERIES) { - recordLatencyOnInvalidConfiguration(startTime); + recordInvalidConfigurationMetrics(startTime); listener.onFailure( new ElasticsearchException( "Rollup requires setting [" @@ -252,7 +252,7 @@ protected void masterOperation( // Assert source index is read-only if (state.blocks().indexBlocked(ClusterBlockLevel.WRITE, sourceIndexName) == false) { - recordLatencyOnInvalidConfiguration(startTime); + recordInvalidConfigurationMetrics(startTime); listener.onFailure( new ElasticsearchException( "Downsample requires setting [" + IndexMetadata.SETTING_BLOCKS_WRITE + " = true] for index [" + sourceIndexName + "]" @@ -327,7 +327,7 @@ protected void masterOperation( } if (validationException.validationErrors().isEmpty() == false) { - recordLatencyOnInvalidConfiguration(startTime); + recordInvalidConfigurationMetrics(startTime); delegate.onFailure(validationException); return; } @@ -336,7 +336,7 @@ protected void masterOperation( try { mapping = createDownsampleIndexMapping(helper, request.getDownsampleConfig(), mapperService, sourceIndexMappings); } catch (IOException e) { - recordLatencyOnFailure(startTime); + recordFailureMetrics(startTime); delegate.onFailure(e); return; } @@ -361,7 +361,7 @@ protected void masterOperation( dimensionFields ); } else { - recordLatencyOnFailure(startTime); + recordFailureMetrics(startTime); delegate.onFailure(new ElasticsearchException("Failed to create downsample index [" + downsampleIndexName + "]")); } }, e -> { @@ -390,7 +390,7 @@ protected void masterOperation( dimensionFields ); } else { - recordLatencyOnFailure(startTime); + recordFailureMetrics(startTime); delegate.onFailure(e); } }) @@ -516,7 +516,7 @@ public void onResponse(PersistentTasksCustomMetadata.PersistentTask { actionListener.onResponse(AcknowledgedResponse.TRUE); - recordLatencyOnSuccess(startTime); + recordSuccessMetrics(startTime); }, t -> { /* * At this point downsample index has been created @@ -1070,13 +1070,13 @@ public void onResponse(final AcknowledgedResponse response) { */ logger.error("Failed to force-merge downsample index [" + downsampleIndexName + "]", t); actionListener.onResponse(AcknowledgedResponse.TRUE); - recordLatencyOnSuccess(startTime); + recordSuccessMetrics(startTime); })); } @Override public void onFailure(Exception e) { - recordLatencyOnSuccess(startTime); + recordSuccessMetrics(startTime); this.actionListener.onFailure(e); } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index d23f1e4b89a8c..d68f6e8d11f81 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -125,6 +125,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; 
import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.mockito.Mockito.mock; public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase { @@ -1178,8 +1179,12 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D assertEquals(1, measurement.attributes().size()); assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); } + List shardActionMeasurements = plugin.getLongCounterMeasurement(DownsampleMetrics.ACTIONS_SHARD); + assertThat(shardActionMeasurements.size(), greaterThanOrEqualTo(1)); + assertThat(shardActionMeasurements.get(0).getLong(), equalTo(1L)); + assertThat(shardActionMeasurements.get(0).attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); - // Total latency gets recorded after reindex and force-merge complete. + // Total latency and counters are recorded after reindex and force-merge complete. assertBusy(() -> { final List latencyTotalMetrics = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_TOTAL); assertFalse(latencyTotalMetrics.isEmpty()); @@ -1191,6 +1196,14 @@ private void assertDownsampleIndex(String sourceIndex, String downsampleIndex, D assertEquals(1, measurement.attributes().size()); assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "invalid_configuration", "failed"))); } + + List actionMeasurements = plugin.getLongCounterMeasurement(DownsampleMetrics.ACTIONS); + assertThat(actionMeasurements.size(), greaterThanOrEqualTo(1)); + assertThat(actionMeasurements.get(0).getLong(), equalTo(1L)); + assertThat( + actionMeasurements.get(0).attributes().get("status"), + Matchers.in(List.of("success", "invalid_configuration", "failed")) + ); }, 10, TimeUnit.SECONDS); } From 3288f46fd95e9f6ff5a8f9175d5928cd29796c1d Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Mon, 15 Apr 2024 10:56:38 -0600 Subject: [PATCH 027/130] Allow writes to ILM shrink action target index (#107121) The source index of a shrink action is made read-only to perform the shrink. During shrink, settings are copied from source index to target index, causing the target index to also be read-only. This change adds a shrink policy argument making the target index writable after shrinking. The default is to keep the read-only setting to preserve current behavior. 
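For illustration, a minimal sketch of opting in through the updated ShrinkAction
constructor (the third argument is the new flag; the phase wiring mirrors the tests
below and is assumed rather than prescribed):

    // shrink to one primary shard and clear the index.blocks.write setting
    // on the shrunken index once the action completes
    ShrinkAction shrink = new ShrinkAction(1, null, true);
    Phase warm = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, shrink));

Callers that want the previous behavior pass false, which is also the default when
allow_write_after_shrink is omitted from the request body.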
closes #106599 --- docs/changelog/107121.yaml | 6 ++ .../reference/ilm/actions/ilm-shrink.asciidoc | 6 +- docs/reference/index-modules/blocks.asciidoc | 1 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/core/ilm/ShrinkAction.java | 51 ++++++++--- .../xpack/core/ilm/ShrinkActionTests.java | 76 +++++++++++----- .../ilm/TimeseriesLifecycleTypeTests.java | 8 +- .../xpack/MigrateToDataTiersIT.java | 4 +- .../xpack/TimeSeriesRestDriver.java | 2 +- .../xpack/ilm/ExplainLifecycleIT.java | 2 +- .../xpack/ilm/TimeSeriesDataStreamsIT.java | 2 +- .../xpack/ilm/TimeseriesMoveToStepIT.java | 2 +- .../actions/SearchableSnapshotActionIT.java | 4 +- .../xpack/ilm/actions/ShrinkActionIT.java | 89 +++++++++++++++++-- .../ClusterStateWaitThresholdBreachTests.java | 2 +- .../xpack/ilm/ILMMultiNodeIT.java | 2 +- .../ilm/ILMMultiNodeWithCCRDisabledIT.java | 2 +- ...MigrateToDataTiersRoutingServiceTests.java | 6 +- .../xpack/ilm/IndexLifecycleServiceTests.java | 2 +- .../xpack/ilm/PolicyStepsRegistryTests.java | 4 +- 20 files changed, 208 insertions(+), 64 deletions(-) create mode 100644 docs/changelog/107121.yaml diff --git a/docs/changelog/107121.yaml b/docs/changelog/107121.yaml new file mode 100644 index 0000000000000..d46b1d58e9dfb --- /dev/null +++ b/docs/changelog/107121.yaml @@ -0,0 +1,6 @@ +pr: 107121 +summary: Add a flag to re-enable writes on the final index after an ILM shrink action. +area: ILM+SLM +type: enhancement +issues: + - 106599 diff --git a/docs/reference/ilm/actions/ilm-shrink.asciidoc b/docs/reference/ilm/actions/ilm-shrink.asciidoc index d440943c2ee75..8753c41313b8c 100644 --- a/docs/reference/ilm/actions/ilm-shrink.asciidoc +++ b/docs/reference/ilm/actions/ilm-shrink.asciidoc @@ -4,7 +4,7 @@ Phases allowed: hot, warm. -Sets a source index to <> and shrinks it into +<> on a source index and shrinks it into a new index with fewer primary shards. The name of the resulting index is `shrink--`. This action corresponds to the <>. @@ -50,6 +50,10 @@ with totaling 1000gb, then the target index will have 20 primary shards; if the with totaling 4000gb, then the target index will still have 60 primary shards. This parameter conflicts with `number_of_shards` in the `settings`, only one of them may be set. +`allow_write_after_shrink`:: +(Optional, boolean) +If true, the shrunken index is made writable by removing the <>. Defaults to false. + [[ilm-shrink-ex]] ==== Example diff --git a/docs/reference/index-modules/blocks.asciidoc b/docs/reference/index-modules/blocks.asciidoc index dcd6035fcf174..2d89676a8af2b 100644 --- a/docs/reference/index-modules/blocks.asciidoc +++ b/docs/reference/index-modules/blocks.asciidoc @@ -49,6 +49,7 @@ for help with resolving watermark issues. Set to `true` to disable read operations against the index. +[[index-blocks-write]] `index.blocks.write`:: Set to `true` to disable data write operations against the index. 
Unlike diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 978ad1ce31e28..72404e405edd1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -173,6 +173,7 @@ static TransportVersion def(int id) { public static final TransportVersion HIGHLIGHTERS_TAGS_ON_FIELD_LEVEL = def(8_632_00_0); public static final TransportVersion TRACK_FLUSH_TIME_EXCLUDING_WAITING_ON_LOCKS = def(8_633_00_0); public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_EMBEDDINGS = def(8_634_00_0); + public static final TransportVersion ILM_SHRINK_ENABLE_WRITE = def(8_635_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java index 80732d9622c8b..60f9b7b001060 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java @@ -8,13 +8,16 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.admin.indices.shrink.ResizeNumberOfShardsCalculator; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -26,9 +29,10 @@ import java.io.IOException; import java.time.Instant; -import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; @@ -41,12 +45,13 @@ public class ShrinkAction implements LifecycleAction { public static final String NAME = "shrink"; public static final ParseField NUMBER_OF_SHARDS_FIELD = new ParseField("number_of_shards"); public static final ParseField MAX_PRIMARY_SHARD_SIZE = new ParseField("max_primary_shard_size"); + public static final ParseField ALLOW_WRITE_AFTER_SHRINK = new ParseField("allow_write_after_shrink"); public static final String CONDITIONAL_SKIP_SHRINK_STEP = BranchingStep.NAME + "-check-prerequisites"; public static final String CONDITIONAL_DATASTREAM_CHECK_KEY = BranchingStep.NAME + "-on-datastream-check"; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME, - a -> new ShrinkAction((Integer) a[0], (ByteSizeValue) a[1]) + a -> new ShrinkAction((Integer) a[0], (ByteSizeValue) a[1], (a[2] != null && (Boolean) a[2])) ); static { @@ -57,16 +62,22 @@ public class ShrinkAction implements LifecycleAction { MAX_PRIMARY_SHARD_SIZE, ObjectParser.ValueType.STRING ); + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ALLOW_WRITE_AFTER_SHRINK); } + public static final Settings CLEAR_WRITE_BLOCK_SETTINGS = Settings.builder() + 
.put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), (String) null) + .build(); + private Integer numberOfShards; private ByteSizeValue maxPrimaryShardSize; + private boolean allowWriteAfterShrink; public static ShrinkAction parse(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - public ShrinkAction(@Nullable Integer numberOfShards, @Nullable ByteSizeValue maxPrimaryShardSize) { + public ShrinkAction(@Nullable Integer numberOfShards, @Nullable ByteSizeValue maxPrimaryShardSize, boolean allowWriteAfterShrink) { if (numberOfShards != null && maxPrimaryShardSize != null) { throw new IllegalArgumentException("Cannot set both [number_of_shards] and [max_primary_shard_size]"); } @@ -84,6 +95,7 @@ public ShrinkAction(@Nullable Integer numberOfShards, @Nullable ByteSizeValue ma } this.numberOfShards = numberOfShards; } + this.allowWriteAfterShrink = allowWriteAfterShrink; } public ShrinkAction(StreamInput in) throws IOException { @@ -94,6 +106,7 @@ public ShrinkAction(StreamInput in) throws IOException { this.numberOfShards = null; this.maxPrimaryShardSize = ByteSizeValue.readFrom(in); } + this.allowWriteAfterShrink = in.getTransportVersion().onOrAfter(TransportVersions.ILM_SHRINK_ENABLE_WRITE) && in.readBoolean(); } public Integer getNumberOfShards() { @@ -104,6 +117,10 @@ public ByteSizeValue getMaxPrimaryShardSize() { return maxPrimaryShardSize; } + public boolean getAllowWriteAfterShrink() { + return allowWriteAfterShrink; + } + @Override public void writeTo(StreamOutput out) throws IOException { boolean hasNumberOfShards = numberOfShards != null; @@ -113,6 +130,9 @@ public void writeTo(StreamOutput out) throws IOException { } else { maxPrimaryShardSize.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ILM_SHRINK_ENABLE_WRITE)) { + out.writeBoolean(this.allowWriteAfterShrink); + } } @Override @@ -129,6 +149,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (maxPrimaryShardSize != null) { builder.field(MAX_PRIMARY_SHARD_SIZE.getPreferredName(), maxPrimaryShardSize); } + builder.field(ALLOW_WRITE_AFTER_SHRINK.getPreferredName(), allowWriteAfterShrink); builder.endObject(); return builder; } @@ -158,11 +179,13 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) StepKey isShrunkIndexKey = new StepKey(phase, NAME, ShrunkenIndexCheckStep.NAME); StepKey replaceDataStreamIndexKey = new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME); StepKey deleteIndexKey = new StepKey(phase, NAME, DeleteStep.NAME); + StepKey allowWriteKey = new StepKey(phase, NAME, UpdateSettingsStep.NAME); + StepKey lastOrNextStep = allowWriteAfterShrink ? allowWriteKey : nextStepKey; AsyncBranchingStep conditionalSkipShrinkStep = new AsyncBranchingStep( preShrinkBranchingKey, checkNotWriteIndex, - nextStepKey, + lastOrNextStep, (indexMetadata, clusterState, listener) -> { if (indexMetadata.getSettings().get(LifecycleSettings.SNAPSHOT_INDEX_NAME) != null) { logger.warn( @@ -242,7 +265,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) setSingleNodeKey ); ShrinkStep shrink = new ShrinkStep(shrinkKey, enoughShardsKey, client, numberOfShards, maxPrimaryShardSize); - // wait until the shrunk index is recovered. 
we again wait until the configured threshold is breached and if the shrunk index has // not successfully recovered until then, we rewind to the "cleanup-shrink-index" step to delete this unsuccessful shrunk index // and retry the operation by generating a new shrink index name and attempting to shrink again @@ -278,8 +300,12 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) ShrinkIndexNameSupplier::getShrinkIndexName ); DeleteStep deleteSourceIndexStep = new DeleteStep(deleteIndexKey, isShrunkIndexKey, client); - ShrunkenIndexCheckStep waitOnShrinkTakeover = new ShrunkenIndexCheckStep(isShrunkIndexKey, nextStepKey); - return Arrays.asList( + ShrunkenIndexCheckStep waitOnShrinkTakeover = new ShrunkenIndexCheckStep(isShrunkIndexKey, lastOrNextStep); + UpdateSettingsStep allowWriteAfterShrinkStep = allowWriteAfterShrink + ? new UpdateSettingsStep(allowWriteKey, nextStepKey, client, CLEAR_WRITE_BLOCK_SETTINGS) + : null; + + Stream steps = Stream.of( conditionalSkipShrinkStep, checkNotWriteIndexStep, waitForNoFollowersStep, @@ -297,8 +323,11 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) aliasSwapAndDelete, waitOnShrinkTakeover, replaceDataStreamBackingIndex, - deleteSourceIndexStep + deleteSourceIndexStep, + allowWriteAfterShrinkStep ); + + return steps.filter(Objects::nonNull).collect(Collectors.toList()); } @Override @@ -306,12 +335,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ShrinkAction that = (ShrinkAction) o; - return Objects.equals(numberOfShards, that.numberOfShards) && Objects.equals(maxPrimaryShardSize, that.maxPrimaryShardSize); + return Objects.equals(numberOfShards, that.numberOfShards) + && Objects.equals(maxPrimaryShardSize, that.maxPrimaryShardSize) + && Objects.equals(allowWriteAfterShrink, that.allowWriteAfterShrink); } @Override public int hashCode() { - return Objects.hash(numberOfShards, maxPrimaryShardSize); + return Objects.hash(numberOfShards, maxPrimaryShardSize, allowWriteAfterShrink); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java index e1877e006e0fd..a33d6e3332a40 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java @@ -66,19 +66,30 @@ protected ShrinkAction createTestInstance() { static ShrinkAction randomInstance() { if (randomBoolean()) { - return new ShrinkAction(randomIntBetween(1, 100), null); + return new ShrinkAction(randomIntBetween(1, 100), null, randomBoolean()); } else { - return new ShrinkAction(null, ByteSizeValue.ofBytes(randomIntBetween(1, 100))); + return new ShrinkAction(null, ByteSizeValue.ofBytes(randomIntBetween(1, 100)), randomBoolean()); } } @Override protected ShrinkAction mutateInstance(ShrinkAction action) { - if (action.getNumberOfShards() != null) { - return new ShrinkAction(action.getNumberOfShards() + randomIntBetween(1, 2), null); - } else { - return new ShrinkAction(null, ByteSizeValue.ofBytes(action.getMaxPrimaryShardSize().getBytes() + 1)); + Integer numberOfShards = action.getNumberOfShards(); + ByteSizeValue maxPrimaryShardSize = action.getMaxPrimaryShardSize(); + boolean allowWriteAfterShrink = action.getAllowWriteAfterShrink(); + + switch (randomInt(2)) { + case 0 -> { + numberOfShards = 
randomValueOtherThan(numberOfShards, () -> randomIntBetween(1, 100)); + maxPrimaryShardSize = null; + } + case 1 -> { + maxPrimaryShardSize = randomValueOtherThan(maxPrimaryShardSize, () -> ByteSizeValue.ofBytes(randomIntBetween(1, 100))); + numberOfShards = null; + } + case 2 -> allowWriteAfterShrink = allowWriteAfterShrink == false; } + return new ShrinkAction(numberOfShards, maxPrimaryShardSize, allowWriteAfterShrink); } @Override @@ -87,24 +98,27 @@ protected Reader instanceReader() { } public void testNonPositiveShardNumber() { - Exception e = expectThrows(Exception.class, () -> new ShrinkAction(randomIntBetween(-100, 0), null)); + Exception e = expectThrows(Exception.class, () -> new ShrinkAction(randomIntBetween(-100, 0), null, randomBoolean())); assertThat(e.getMessage(), equalTo("[number_of_shards] must be greater than 0")); } public void testMaxPrimaryShardSize() { ByteSizeValue maxPrimaryShardSize1 = ByteSizeValue.ofBytes(10); - Exception e1 = expectThrows(Exception.class, () -> new ShrinkAction(randomIntBetween(1, 100), maxPrimaryShardSize1)); + Exception e1 = expectThrows( + Exception.class, + () -> new ShrinkAction(randomIntBetween(1, 100), maxPrimaryShardSize1, randomBoolean()) + ); assertThat(e1.getMessage(), equalTo("Cannot set both [number_of_shards] and [max_primary_shard_size]")); ByteSizeValue maxPrimaryShardSize2 = ByteSizeValue.ZERO; - Exception e2 = expectThrows(Exception.class, () -> new ShrinkAction(null, maxPrimaryShardSize2)); + Exception e2 = expectThrows(Exception.class, () -> new ShrinkAction(null, maxPrimaryShardSize2, randomBoolean())); assertThat(e2.getMessage(), equalTo("[max_primary_shard_size] must be greater than 0")); } public void testPerformActionWithSkipBecauseOfShardNumber() throws InterruptedException { String lifecycleName = randomAlphaOfLengthBetween(4, 10); int numberOfShards = randomIntBetween(1, 10); - ShrinkAction action = new ShrinkAction(numberOfShards, null); + ShrinkAction action = new ShrinkAction(numberOfShards, null, randomBoolean()); StepKey nextStepKey = new StepKey( randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), @@ -121,7 +135,7 @@ public void testPerformActionWithSkipBecauseOfShardNumber() throws InterruptedEx public void testPerformActionWithSkipBecauseOfSearchableSnapshot() throws InterruptedException { String lifecycleName = randomAlphaOfLengthBetween(4, 10); int numberOfShards = randomIntBetween(1, 10); - ShrinkAction action = new ShrinkAction(numberOfShards, null); + ShrinkAction action = new ShrinkAction(numberOfShards, null, randomBoolean()); StepKey nextStepKey = new StepKey( randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), @@ -143,7 +157,7 @@ public void testPerformActionWithoutSkip() throws InterruptedException { int divisor = randomFrom(2, 3, 6); int expectedFinalShards = numShards / divisor; String lifecycleName = randomAlphaOfLengthBetween(4, 10); - ShrinkAction action = new ShrinkAction(expectedFinalShards, null); + ShrinkAction action = new ShrinkAction(expectedFinalShards, null, randomBoolean()); StepKey nextStepKey = new StepKey( randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), @@ -160,7 +174,7 @@ public void testPerformActionWithoutSkip() throws InterruptedException { public void testFailureIsPropagated() throws InterruptedException { String lifecycleName = randomAlphaOfLengthBetween(4, 10); int numberOfShards = randomIntBetween(1, 10); - ShrinkAction action = new ShrinkAction(numberOfShards, null); + ShrinkAction action = new 
ShrinkAction(numberOfShards, null, randomBoolean()); StepKey nextStepKey = new StepKey( randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), @@ -185,7 +199,7 @@ public void assertPerformAction( ) throws InterruptedException { String phase = randomAlphaOfLengthBetween(1, 10); List steps = action.toSteps(client, phase, nextStepKey); - AsyncBranchingStep step = ((AsyncBranchingStep) steps.get(0)); + AsyncBranchingStep branchStep = ((AsyncBranchingStep) steps.get(0)); LifecyclePolicy policy = new LifecyclePolicy( lifecycleName, @@ -211,11 +225,11 @@ public void assertPerformAction( indexMetadataBuilder.putCustom( LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY, LifecycleExecutionState.builder() - .setPhase(step.getKey().phase()) + .setPhase(branchStep.getKey().phase()) .setPhaseTime(0L) - .setAction(step.getKey().action()) + .setAction(branchStep.getKey().action()) .setActionTime(0L) - .setStep(step.getKey().name()) + .setStep(branchStep.getKey().name()) .setStepTime(0L) .build() .asMap() @@ -226,7 +240,7 @@ public void assertPerformAction( setUpIndicesStatsRequestMock(indexName, withError); CountDownLatch countDownLatch = new CountDownLatch(1); AtomicBoolean failurePropagated = new AtomicBoolean(false); - step.performAction(state.metadata().index(indexName), state, null, new ActionListener<>() { + branchStep.performAction(state.metadata().index(indexName), state, null, new ActionListener<>() { @Override public void onResponse(Void unused) { countDownLatch.countDown(); @@ -244,12 +258,18 @@ public void onFailure(Exception e) { } }); assertTrue(countDownLatch.await(5, TimeUnit.SECONDS)); + if (withError) { assertTrue(failurePropagated.get()); } else if (shouldSkip) { - assertThat(step.getNextStepKey(), equalTo(nextStepKey)); + if (action.getAllowWriteAfterShrink()) { + Step lastStep = steps.get(steps.size() - 1); + assertThat(branchStep.getNextStepKey(), equalTo(lastStep.getKey())); + } else { + assertThat(branchStep.getNextStepKey(), equalTo(nextStepKey)); + } } else { - assertThat(step.getNextStepKey(), equalTo(steps.get(1).getKey())); + assertThat(branchStep.getNextStepKey(), equalTo(steps.get(1).getKey())); } } @@ -262,7 +282,7 @@ public void testToSteps() { randomAlphaOfLengthBetween(1, 10) ); List steps = action.toSteps(client, phase, nextStepKey); - assertThat(steps.size(), equalTo(18)); + assertThat(steps.size(), equalTo(action.getAllowWriteAfterShrink() ? 
19 : 18)); StepKey expectedFirstKey = new StepKey(phase, ShrinkAction.NAME, ShrinkAction.CONDITIONAL_SKIP_SHRINK_STEP); StepKey expectedSecondKey = new StepKey(phase, ShrinkAction.NAME, CheckNotDataStreamWriteIndexStep.NAME); StepKey expectedThirdKey = new StepKey(phase, ShrinkAction.NAME, WaitForNoFollowersStep.NAME); @@ -281,12 +301,16 @@ public void testToSteps() { StepKey expectedSixteenKey = new StepKey(phase, ShrinkAction.NAME, ShrunkenIndexCheckStep.NAME); StepKey expectedSeventeenKey = new StepKey(phase, ShrinkAction.NAME, ReplaceDataStreamBackingIndexStep.NAME); StepKey expectedEighteenKey = new StepKey(phase, ShrinkAction.NAME, DeleteStep.NAME); + StepKey expectedNineteenthKey = new StepKey(phase, ShrinkAction.NAME, UpdateSettingsStep.NAME); assertTrue(steps.get(0) instanceof AsyncBranchingStep); assertThat(steps.get(0).getKey(), equalTo(expectedFirstKey)); expectThrows(IllegalStateException.class, () -> steps.get(0).getNextStepKey()); assertThat(((AsyncBranchingStep) steps.get(0)).getNextStepKeyOnFalse(), equalTo(expectedSecondKey)); - assertThat(((AsyncBranchingStep) steps.get(0)).getNextStepKeyOnTrue(), equalTo(nextStepKey)); + assertThat( + ((AsyncBranchingStep) steps.get(0)).getNextStepKeyOnTrue(), + equalTo(action.getAllowWriteAfterShrink() ? expectedNineteenthKey : nextStepKey) + ); assertTrue(steps.get(1) instanceof CheckNotDataStreamWriteIndexStep); assertThat(steps.get(1).getKey(), equalTo(expectedSecondKey)); @@ -357,7 +381,7 @@ public void testToSteps() { assertTrue(steps.get(15) instanceof ShrunkenIndexCheckStep); assertThat(steps.get(15).getKey(), equalTo(expectedSixteenKey)); - assertThat(steps.get(15).getNextStepKey(), equalTo(nextStepKey)); + assertThat(steps.get(15).getNextStepKey(), equalTo(action.getAllowWriteAfterShrink() ? 
expectedNineteenthKey : nextStepKey)); assertTrue(steps.get(16) instanceof ReplaceDataStreamBackingIndexStep); assertThat(steps.get(16).getKey(), equalTo(expectedSeventeenKey)); @@ -366,6 +390,12 @@ public void testToSteps() { assertTrue(steps.get(17) instanceof DeleteStep); assertThat(steps.get(17).getKey(), equalTo(expectedEighteenKey)); assertThat(steps.get(17).getNextStepKey(), equalTo(expectedSixteenKey)); + + if (action.getAllowWriteAfterShrink()) { + assertTrue(steps.get(18) instanceof UpdateSettingsStep); + assertThat(steps.get(18).getKey(), equalTo(expectedNineteenthKey)); + assertThat(steps.get(18).getNextStepKey(), equalTo(nextStepKey)); + } } private void setUpIndicesStatsRequestMock(String index, boolean withError) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java index 5638d614d9573..55fa3792fa6c7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java @@ -74,7 +74,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { null, null ); - private static final ShrinkAction TEST_SHRINK_ACTION = new ShrinkAction(1, null); + private static final ShrinkAction TEST_SHRINK_ACTION = new ShrinkAction(1, null, false); private static final ReadOnlyAction TEST_READ_ONLY_ACTION = new ReadOnlyAction(); private static final SetPriorityAction TEST_PRIORITY_ACTION = new SetPriorityAction(0); @@ -261,7 +261,7 @@ public void testActionsThatCannotFollowSearchableSnapshot() { public void testValidateActionsFollowingSearchableSnapshot() { { Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction("repo"))); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null))); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null, false))); Phase coldPhase = new Phase("cold", TimeValue.ZERO, Map.of(FreezeAction.NAME, FreezeAction.INSTANCE)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -277,7 +277,7 @@ public void testValidateActionsFollowingSearchableSnapshot() { } { - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null))); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null, false))); Phase coldPhase = new Phase( "cold", TimeValue.ZERO, @@ -299,7 +299,7 @@ public void testValidateActionsFollowingSearchableSnapshot() { { Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction("repo"))); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null))); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(ShrinkAction.NAME, new ShrinkAction(1, null, false))); Phase coldPhase = new Phase( "cold", TimeValue.ZERO, diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java index 8706a54a0d893..60e71b095039e 100644 --- 
a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java @@ -102,7 +102,7 @@ public void testMigrateToDataTiersAction() throws Exception { warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); - warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null)); + warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); @@ -378,7 +378,7 @@ public void testMigrationDryRun() throws Exception { warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); - warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null)); + warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index fe0b98ab658da..c46d4d334cd09 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -207,7 +207,7 @@ public static void createFullPolicy(RestClient client, String policyName, TimeVa null ) ); - warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null)); + warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); coldActions.put( diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java index 8d51c380e96cf..dc8c248bbbad6 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ExplainLifecycleIT.java @@ -76,7 +76,7 @@ public void testExplainFilters() throws Exception { { // Create a "shrink-only-policy" Map warmActions = new HashMap<>(); - warmActions.put(ShrinkAction.NAME, new ShrinkAction(17, null)); + warmActions.put(ShrinkAction.NAME, new ShrinkAction(17, null, false)); Map phases = new HashMap<>(); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-only-policy", phases); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java 
b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java index 95735ffbe8a87..c97d911e9de02 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesDataStreamsIT.java @@ -122,7 +122,7 @@ public void testRolloverIsSkippedOnManualDataStreamRollover() throws Exception { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/70595") public void testShrinkActionInPolicyWithoutHotPhase() throws Exception { - createNewSingletonPolicy(client(), policyName, "warm", new ShrinkAction(1, null)); + createNewSingletonPolicy(client(), policyName, "warm", new ShrinkAction(1, null, false)); createComposableTemplate(client(), template, dataStream + "*", getTemplate(policyName)); indexDocument(client(), dataStream, true); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeseriesMoveToStepIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeseriesMoveToStepIT.java index 651dffacdfe0e..5a61af793d907 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeseriesMoveToStepIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeseriesMoveToStepIT.java @@ -142,7 +142,7 @@ public void testMoveToRolloverStep() throws Exception { } public void testMoveToInjectedStep() throws Exception { - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(1, null), TimeValue.timeValueHours(12)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(1, null, false), TimeValue.timeValueHours(12)); createIndexWithSettings( client(), diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index 361cfd79b5e88..a5ffbd86416a9 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -326,7 +326,7 @@ public void testUpdatePolicyToAddPhasesYieldsInvalidActionsToBeSkipped() throws new Phase( "warm", TimeValue.ZERO, - Map.of(ShrinkAction.NAME, new ShrinkAction(1, null), ForceMergeAction.NAME, new ForceMergeAction(1, null)) + Map.of(ShrinkAction.NAME, new ShrinkAction(1, null, false), ForceMergeAction.NAME, new ForceMergeAction(1, null)) ), new Phase("cold", TimeValue.ZERO, Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo))), null, @@ -416,7 +416,7 @@ public void testRestoredIndexManagedByLocalPolicySkipsIllegalActions() throws Ex new Phase( "warm", TimeValue.ZERO, - Map.of(ShrinkAction.NAME, new ShrinkAction(1, null), ForceMergeAction.NAME, new ForceMergeAction(1, null)) + Map.of(ShrinkAction.NAME, new ShrinkAction(1, null, false), ForceMergeAction.NAME, new ForceMergeAction(1, null)) ), new Phase("cold", TimeValue.ZERO, Map.of(FreezeAction.NAME, FreezeAction.INSTANCE)), null, diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java 
b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java index 0d9c135bd5a60..d2f2dbbd0c9fb 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java @@ -83,7 +83,7 @@ public void testShrinkAction() throws Exception { alias, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null, false)); updatePolicy(client(), index, policy); String shrunkenIndexName = waitAndGetShrinkIndexName(client(), index); @@ -109,7 +109,7 @@ public void testSkipShrinkSameShardsWithNumberOfShards() throws Exception { alias, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(numberOfShards, null)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(numberOfShards, null, false)); updatePolicy(client(), index, policy); assertBusy(() -> { assertTrue(indexExists(index)); @@ -130,7 +130,7 @@ public void testSkipShrinkSameShardsWithMaxShardSize() throws Exception { alias, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(null, ByteSizeValue.ofGb(50))); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(null, ByteSizeValue.ofGb(50), false)); updatePolicy(client(), index, policy); assertBusy(() -> { assertTrue(indexExists(index)); @@ -161,7 +161,7 @@ public void testShrinkDuringSnapshot() throws Exception { ); assertOK(client().performRequest(request)); // create delete policy - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(1, null), TimeValue.timeValueMillis(0)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(1, null, false), TimeValue.timeValueMillis(0)); // create index without policy createIndexWithSettings( client(), @@ -209,7 +209,8 @@ public void testShrinkActionInTheHotPhase() throws Exception { RolloverAction.NAME, new RolloverAction(null, null, null, 1L, null, null, null, null, null, null), ShrinkAction.NAME, - new ShrinkAction(expectedFinalShards, null) + new ShrinkAction(expectedFinalShards, null, false) + ); Map phases = Map.of("hot", new Phase("hot", TimeValue.ZERO, hotActions)); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); @@ -279,7 +280,7 @@ public void testSetSingleNodeAllocationRetriesUntilItSucceeds() throws Exception // assign the policy that'll attempt to shrink the index (disabling the migrate action as it'll otherwise wait for // all shards to be active and we want that to happen as part of the shrink action) MigrateAction migrateAction = MigrateAction.DISABLED; - ShrinkAction shrinkAction = new ShrinkAction(expectedFinalShards, null); + ShrinkAction shrinkAction = new ShrinkAction(expectedFinalShards, null, false); Phase phase = new Phase( "warm", TimeValue.ZERO, @@ -332,7 +333,8 @@ public void testAutomaticRetryFailedShrinkAction() throws Exception { alias, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 
numShards).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(numShards + randomIntBetween(1, numShards), null)); + var shrinkAction = new ShrinkAction(numShards + randomIntBetween(1, numShards), null, false); + createNewSingletonPolicy(client(), policy, "warm", shrinkAction); updatePolicy(client(), index, policy); assertBusy( () -> assertThat( @@ -344,7 +346,7 @@ public void testAutomaticRetryFailedShrinkAction() throws Exception { ); // update policy to be correct - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null, false)); updatePolicy(client(), index, policy); // assert corrected policy is picked up and index is shrunken @@ -379,7 +381,7 @@ public void testTotalShardsPerNodeTooLow() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), numShards - 2) ); - createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null)); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null, false)); updatePolicy(client(), index, policy); String shrunkenIndexName = waitAndGetShrinkIndexName(client(), index); @@ -397,4 +399,73 @@ public void testTotalShardsPerNodeTooLow() throws Exception { }); expectThrows(ResponseException.class, () -> indexDocument(client(), index)); } + + public void testAllowWritesInShrunkIndex() throws Exception { + int numShards = 4; + int divisor = randomFrom(2, 4); + int expectedFinalShards = numShards / divisor; + boolean initialIndexIsReadOnly = randomBoolean(); + createIndexWithSettings( + client(), + index, + alias, + Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), initialIndexIsReadOnly ? 
"true" : null) + ); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(expectedFinalShards, null, true)); + updatePolicy(client(), index, policy); + + String shrunkenIndexName = waitAndGetShrinkIndexName(client(), index); + assertBusy(() -> assertTrue(indexExists(shrunkenIndexName)), 30, TimeUnit.SECONDS); + assertBusy(() -> assertTrue(aliasExists(shrunkenIndexName, index))); + assertBusy( + () -> assertThat(getStepKeyForIndex(client(), shrunkenIndexName), equalTo(PhaseCompleteStep.finalStep("warm").getKey())) + ); + assertBusy(() -> { + Map settings = getOnlyIndexSettings(client(), shrunkenIndexName); + assertThat(settings.get(SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(expectedFinalShards))); + assertThat(settings.get(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue()); + + // check that write block removed + assertNull(settings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey())); + }); + + indexDocument(client(), index); + // check that actually wrote to index + assertBusy(() -> assertDocCount(client(), index, 1)); + } + + public void testAllowWritesWhenShrinkIsSkipped() throws Exception { + int numberOfShards = randomFrom(1, 2); + boolean initialIndexIsReadOnly = randomBoolean(); + createIndexWithSettings( + client(), + index, + alias, + Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), initialIndexIsReadOnly ? "true" : null) + ); + createNewSingletonPolicy(client(), policy, "warm", new ShrinkAction(numberOfShards, null, true)); + updatePolicy(client(), index, policy); + assertBusy(() -> { + assertTrue(indexExists(index)); + Map settings = getOnlyIndexSettings(client(), index); + assertThat(getStepKeyForIndex(client(), index), equalTo(PhaseCompleteStep.finalStep("warm").getKey())); + assertThat(settings.get(SETTING_NUMBER_OF_SHARDS), equalTo(String.valueOf(numberOfShards))); + assertThat(settings.get(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id"), nullValue()); + // the shrink action was effectively skipped so there must not be any `shrink_index_name` in the ILM state + assertThat(explainIndex(client(), index).get(SHRINK_INDEX_NAME), nullValue()); + + // check that write block removed + assertNull(settings.get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey())); + }); + + indexDocument(client(), index); + // check that actually wrote to index + assertBusy(() -> assertDocCount(client(), index, 1)); + } } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java index 287374896fd23..7602a2cd16e78 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java @@ -88,7 +88,7 @@ public void testWaitInShrunkShardsAllocatedExceedsThreshold() throws Exception { Phase warmPhase = new Phase( "warm", TimeValue.ZERO, - Map.of(MigrateAction.NAME, MigrateAction.DISABLED, ShrinkAction.NAME, new ShrinkAction(1, null)) + Map.of(MigrateAction.NAME, MigrateAction.DISABLED, ShrinkAction.NAME, new ShrinkAction(1, null, false)) ); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("warm", warmPhase)); 
PutLifecycleRequest putLifecycleRequest = new PutLifecycleRequest(lifecyclePolicy); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java index 4164911dcad79..46c47869d8651 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java @@ -70,7 +70,7 @@ public void testShrinkOnTiers() throws Exception { RolloverAction rolloverAction = new RolloverAction(null, null, null, 1L, null, null, null, null, null, null); Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.singletonMap(rolloverAction.getWriteableName(), rolloverAction)); - ShrinkAction shrinkAction = new ShrinkAction(1, null); + ShrinkAction shrinkAction = new ShrinkAction(1, null, false); Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.singletonMap(shrinkAction.getWriteableName(), shrinkAction)); Map phases = new HashMap<>(); phases.put(hotPhase.getName(), hotPhase); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java index 7314587573f96..07c82f3dcfe98 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java @@ -70,7 +70,7 @@ public void testShrinkOnTiers() throws Exception { ensureGreen(); Map actions = new HashMap<>(); RolloverAction rolloverAction = new RolloverAction(null, null, null, 1L, null, null, null, null, null, null); - ShrinkAction shrinkAction = new ShrinkAction(1, null); + ShrinkAction shrinkAction = new ShrinkAction(1, null, false); actions.put(rolloverAction.getWriteableName(), rolloverAction); actions.put(shrinkAction.getWriteableName(), shrinkAction); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java index 63c029316536f..51df651ea4a4c 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingServiceTests.java @@ -102,7 +102,7 @@ public void setupTestEntities() { } public void testMigrateIlmPolicyForIndexWithoutILMMetadata() { - ShrinkAction shrinkAction = new ShrinkAction(2, null); + ShrinkAction shrinkAction = new ShrinkAction(2, null, false); AllocateAction warmAllocateAction = new AllocateAction(null, null, Map.of("data", "warm"), null, Map.of("rack", "rack1")); AllocateAction coldAllocateAction = new AllocateAction(0, null, null, null, Map.of("data", "cold")); SetPriorityAction warmSetPriority = new SetPriorityAction(100); @@ -153,7 +153,7 @@ public void testMigrateIlmPolicyForIndexWithoutILMMetadata() { } public void testMigrateIlmPolicyForPhaseWithDeactivatedMigrateAction() { - ShrinkAction shrinkAction = new ShrinkAction(2, null); + ShrinkAction shrinkAction = new 
ShrinkAction(2, null, false); AllocateAction warmAllocateAction = new AllocateAction(null, null, Map.of("data", "warm"), null, Map.of("rack", "rack1")); LifecyclePolicy policy = new LifecyclePolicy( @@ -216,7 +216,7 @@ public void testMigrateIlmPolicyForPhaseWithDeactivatedMigrateAction() { @SuppressWarnings("unchecked") public void testMigrateIlmPolicyRefreshesCachedPhase() { - ShrinkAction shrinkAction = new ShrinkAction(2, null); + ShrinkAction shrinkAction = new ShrinkAction(2, null, false); AllocateAction warmAllocateAction = new AllocateAction(null, null, Map.of("data", "warm"), null, Map.of("rack", "rack1")); AllocateAction coldAllocateAction = new AllocateAction(0, null, null, null, Map.of("data", "cold")); SetPriorityAction warmSetPriority = new SetPriorityAction(100); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index dd1e2bb9d8dd7..b3c92c0f6b128 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -233,7 +233,7 @@ public void testRequestedStopOnShrink() { public void testRequestedStopInShrinkActionButNotShrinkStep() { // test all the shrink action steps that ILM can be stopped during (basically all of them minus the actual shrink) - ShrinkAction action = new ShrinkAction(1, null); + ShrinkAction action = new ShrinkAction(1, null, false); action.toSteps(mock(Client.class), "warm", randomStepKey()) .stream() .map(sk -> sk.getKey().name()) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java index cd1a5d3744f02..36d537a57382c 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java @@ -335,7 +335,7 @@ public void testUpdatePolicyButNoPhaseChangeIndexStepsDontChange() throws Except Mockito.when(client.settings()).thenReturn(Settings.EMPTY); String policyName = randomAlphaOfLength(5); Map actions = new HashMap<>(); - actions.put("shrink", new ShrinkAction(1, null)); + actions.put("shrink", new ShrinkAction(1, null, false)); Map phases = new HashMap<>(); Phase warmPhase = new Phase("warm", TimeValue.ZERO, actions); PhaseExecutionInfo pei = new PhaseExecutionInfo(policyName, warmPhase, 1, randomNonNegativeLong()); @@ -344,7 +344,7 @@ public void testUpdatePolicyButNoPhaseChangeIndexStepsDontChange() throws Except LifecyclePolicy newPolicy = new LifecyclePolicy(policyName, phases); // Modify the policy actions = new HashMap<>(); - actions.put("shrink", new ShrinkAction(2, null)); + actions.put("shrink", new ShrinkAction(2, null, false)); phases = new HashMap<>(); phases.put("warm", new Phase("warm", TimeValue.ZERO, actions)); LifecyclePolicy updatedPolicy = new LifecyclePolicy(policyName, phases); From 7706bedfe816c2415f0da04555d27ab306fdd33d Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Mon, 15 Apr 2024 19:57:41 +0200 Subject: [PATCH 028/130] Fix monitoring-es-mb.json (#107486) --- .../template-resources/src/main/resources/monitoring-es-mb.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json 
b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json index 315d6904deb93..27262507518d2 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json @@ -2366,7 +2366,7 @@ "shards_stats": { "properties": { "total_count": { - "type": long + "type": "long" } } }, From a071209814d2150ab98d9ea159a47f460588375a Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 17:10:22 -0400 Subject: [PATCH 029/130] muting (#107495) Muting https://github.com/elastic/elasticsearch/issues/106126 --- .../elasticsearch/lucene/spatial/CentroidCalculatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java index f28a45e702e39..3fdea8332dc33 100644 --- a/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java @@ -319,6 +319,7 @@ public void testGeometryCollection() { assertThat(calculator, matchesCentroid(addFromCalculator)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106126") public void testAddDifferentDimensionalType() { Point point = randomPoint(); Line line = randomLine(); From d8348560a9d20c3d020782c3d3cbcd602d9b4251 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 15 Apr 2024 17:17:34 -0400 Subject: [PATCH 030/130] muting (#107496) Muting https://github.com/elastic/elasticsearch/issues/100062 --- .../org/elasticsearch/http/ClusterHealthRestCancellationIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java index 64dd20f1fdfc4..5c3fb155588a7 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java @@ -30,6 +30,7 @@ public class ClusterHealthRestCancellationIT extends HttpSmokeTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100062") @TestIssueLogging( issueUrl = "https://github.com/elastic/elasticsearch/issues/100062", value = "org.elasticsearch.test.TaskAssertions:TRACE" From 353abef214950a5ebe15afddbb84aa56e55dd732 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Mon, 15 Apr 2024 18:39:26 -0400 Subject: [PATCH 031/130] [ES|QL] Base64 decoding and encoding functions (#107390) * add base64 functions --- .../description/from_base64.asciidoc | 5 + .../functions/description/to_base64.asciidoc | 5 + .../functions/examples/from_base64.asciidoc | 13 ++ .../functions/examples/to_base64.asciidoc | 13 ++ .../kibana/definition/from_base64.json | 35 +++++ .../kibana/definition/to_base64.json | 35 +++++ .../esql/functions/kibana/docs/from_base64.md | 11 ++ .../esql/functions/kibana/docs/to_base64.md | 11 ++ .../functions/layout/from_base64.asciidoc | 15 ++ .../esql/functions/layout/to_base64.asciidoc | 15 ++ .../functions/parameters/from_base64.asciidoc | 6 + 
.../functions/parameters/to_base64.asciidoc | 6 + .../esql/functions/signature/from_base64.svg | 1 + .../esql/functions/signature/to_base64.svg | 1 + .../type-conversion-functions.asciidoc | 4 + .../esql/functions/types/from_base64.asciidoc | 10 ++ .../esql/functions/types/to_base64.asciidoc | 10 ++ .../src/main/resources/meta.csv-spec | 10 +- .../src/main/resources/string.csv-spec | 47 +++++++ .../scalar/convert/FromBase64Evaluator.java | 120 ++++++++++++++++ .../scalar/convert/ToBase64Evaluator.java | 131 ++++++++++++++++++ .../function/EsqlFunctionRegistry.java | 4 + .../function/scalar/convert/FromBase64.java | 91 ++++++++++++ .../function/scalar/convert/ToBase64.java | 90 ++++++++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 6 + .../xpack/esql/plugin/EsqlFeatures.java | 6 + .../scalar/convert/FromBase64Tests.java | 65 +++++++++ .../scalar/convert/ToBase64Tests.java | 65 +++++++++ 28 files changed, 830 insertions(+), 1 deletion(-) create mode 100644 docs/reference/esql/functions/description/from_base64.asciidoc create mode 100644 docs/reference/esql/functions/description/to_base64.asciidoc create mode 100644 docs/reference/esql/functions/examples/from_base64.asciidoc create mode 100644 docs/reference/esql/functions/examples/to_base64.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/from_base64.json create mode 100644 docs/reference/esql/functions/kibana/definition/to_base64.json create mode 100644 docs/reference/esql/functions/kibana/docs/from_base64.md create mode 100644 docs/reference/esql/functions/kibana/docs/to_base64.md create mode 100644 docs/reference/esql/functions/layout/from_base64.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_base64.asciidoc create mode 100644 docs/reference/esql/functions/parameters/from_base64.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_base64.asciidoc create mode 100644 docs/reference/esql/functions/signature/from_base64.svg create mode 100644 docs/reference/esql/functions/signature/to_base64.svg create mode 100644 docs/reference/esql/functions/types/from_base64.asciidoc create mode 100644 docs/reference/esql/functions/types/to_base64.asciidoc create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java create mode 100644 x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java diff --git a/docs/reference/esql/functions/description/from_base64.asciidoc b/docs/reference/esql/functions/description/from_base64.asciidoc new file mode 100644 index 0000000000000..e360e2a51d575 --- /dev/null +++ b/docs/reference/esql/functions/description/from_base64.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Decode a base64 string. 
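Both new functions are thin wrappers around `java.util.Base64` (see the `FromBase64.process` and `ToBase64.process` helpers later in this patch). As a minimal, standalone sketch of the round trip the documented examples rely on — the class name `Base64RoundTrip` is illustrative and not part of the patch:

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64RoundTrip {
    public static void main(String[] args) {
        // TO_BASE64("elastic") is documented below to return "ZWxhc3RpYw=="
        String encoded = Base64.getEncoder().encodeToString("elastic".getBytes(StandardCharsets.UTF_8));
        System.out.println(encoded); // prints ZWxhc3RpYw==

        // FROM_BASE64("ZWxhc3RpYw==") decodes back to "elastic"
        byte[] decoded = Base64.getDecoder().decode(encoded);
        System.out.println(new String(decoded, StandardCharsets.UTF_8)); // prints elastic
    }
}
```

The same pair of values ("elastic" / "ZWxhc3RpYw==") is exercised by the csv-spec tests added further down in this patch.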
diff --git a/docs/reference/esql/functions/description/to_base64.asciidoc b/docs/reference/esql/functions/description/to_base64.asciidoc new file mode 100644 index 0000000000000..0b282e2ed755b --- /dev/null +++ b/docs/reference/esql/functions/description/to_base64.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Encode a string to a base64 string. diff --git a/docs/reference/esql/functions/examples/from_base64.asciidoc b/docs/reference/esql/functions/examples/from_base64.asciidoc new file mode 100644 index 0000000000000..0489a88911876 --- /dev/null +++ b/docs/reference/esql/functions/examples/from_base64.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=from_base64] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=from_base64-result] +|=== + diff --git a/docs/reference/esql/functions/examples/to_base64.asciidoc b/docs/reference/esql/functions/examples/to_base64.asciidoc new file mode 100644 index 0000000000000..7ac717440faa5 --- /dev/null +++ b/docs/reference/esql/functions/examples/to_base64.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=to_base64] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=to_base64-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/from_base64.json b/docs/reference/esql/functions/kibana/definition/from_base64.json new file mode 100644 index 0000000000000..4f09b52bb06f6 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/from_base64.json @@ -0,0 +1,35 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "from_base64", + "description" : "Decode a base64 string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "A base64 string." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "A base64 string." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "row a = \"ZWxhc3RpYw==\" \n| eval d = from_base64(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/definition/to_base64.json b/docs/reference/esql/functions/kibana/definition/to_base64.json new file mode 100644 index 0000000000000..074a644bf1bab --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/to_base64.json @@ -0,0 +1,35 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "to_base64", + "description" : "Encode a string to a base64 string.", + "signatures" : [ + { + "params" : [ + { + "name" : "string", + "type" : "keyword", + "optional" : false, + "description" : "A string." 
+ } + ], + "variadic" : false, + "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "string", + "type" : "text", + "optional" : false, + "description" : "A string." + } + ], + "variadic" : false, + "returnType" : "keyword" + } + ], + "examples" : [ + "row a = \"elastic\" \n| eval e = to_base64(a)" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/from_base64.md b/docs/reference/esql/functions/kibana/docs/from_base64.md new file mode 100644 index 0000000000000..3b4a88f77c73a --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/from_base64.md @@ -0,0 +1,11 @@ + + +### FROM_BASE64 +Decode a base64 string. + +``` +row a = "ZWxhc3RpYw==" +| eval d = from_base64(a) +``` diff --git a/docs/reference/esql/functions/kibana/docs/to_base64.md b/docs/reference/esql/functions/kibana/docs/to_base64.md new file mode 100644 index 0000000000000..2863a69da7b29 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/to_base64.md @@ -0,0 +1,11 @@ + + +### TO_BASE64 +Encode a string to a base64 string. + +``` +row a = "elastic" +| eval e = to_base64(a) +``` diff --git a/docs/reference/esql/functions/layout/from_base64.asciidoc b/docs/reference/esql/functions/layout/from_base64.asciidoc new file mode 100644 index 0000000000000..72711866c8b02 --- /dev/null +++ b/docs/reference/esql/functions/layout/from_base64.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-from_base64]] +=== `FROM_BASE64` + +*Syntax* + +[.text-center] +image::esql/functions/signature/from_base64.svg[Embedded,opts=inline] + +include::../parameters/from_base64.asciidoc[] +include::../description/from_base64.asciidoc[] +include::../types/from_base64.asciidoc[] +include::../examples/from_base64.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_base64.asciidoc b/docs/reference/esql/functions/layout/to_base64.asciidoc new file mode 100644 index 0000000000000..8b8a552c0cd7b --- /dev/null +++ b/docs/reference/esql/functions/layout/to_base64.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-to_base64]] +=== `TO_BASE64` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_base64.svg[Embedded,opts=inline] + +include::../parameters/to_base64.asciidoc[] +include::../description/to_base64.asciidoc[] +include::../types/to_base64.asciidoc[] +include::../examples/to_base64.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/from_base64.asciidoc b/docs/reference/esql/functions/parameters/from_base64.asciidoc new file mode 100644 index 0000000000000..302a3d4e131a8 --- /dev/null +++ b/docs/reference/esql/functions/parameters/from_base64.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +A base64 string. diff --git a/docs/reference/esql/functions/parameters/to_base64.asciidoc b/docs/reference/esql/functions/parameters/to_base64.asciidoc new file mode 100644 index 0000000000000..f868fa477fc35 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_base64.asciidoc @@ -0,0 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`string`:: +A string. 
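The generated evaluators added later in this patch avoid per-row allocation by decoding into a reusable scratch buffer (the `BytesRefBuilder oScratch` field). A simplified sketch of that pattern, using plain byte arrays in place of Lucene's `BytesRef`/`BytesRefBuilder` — `ScratchDecoder` is an illustrative name, not part of the patch:

```java
import java.util.Arrays;
import java.util.Base64;

// Mirrors the shape of FromBase64.process(field, oScratch) below:
// grow a reusable scratch buffer, decode into it, and return only
// the decoded prefix so the buffer can be reused for the next row.
class ScratchDecoder {
    private byte[] scratch = new byte[0];

    byte[] decode(byte[] input) {
        if (scratch.length < input.length) {
            // decoded output is always shorter than the base64 input,
            // so input.length is a safe capacity (like oScratch.grow(field.length))
            scratch = new byte[input.length];
        }
        int decodedSize = Base64.getDecoder().decode(input, scratch);
        return Arrays.copyOf(scratch, decodedSize);
    }
}
```

Returning only the decoded prefix keeps the buffer reusable across rows, which is why `FromBase64.process` builds its result as `new BytesRef(oScratch.bytes(), 0, decodedSize)`.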
diff --git a/docs/reference/esql/functions/signature/from_base64.svg b/docs/reference/esql/functions/signature/from_base64.svg new file mode 100644 index 0000000000000..b4a45a27d9935 --- /dev/null +++ b/docs/reference/esql/functions/signature/from_base64.svg @@ -0,0 +1 @@ +FROM_BASE64(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_base64.svg b/docs/reference/esql/functions/signature/to_base64.svg new file mode 100644 index 0000000000000..629e91089417b --- /dev/null +++ b/docs/reference/esql/functions/signature/to_base64.svg @@ -0,0 +1 @@ +TO_BASE64(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/type-conversion-functions.asciidoc b/docs/reference/esql/functions/type-conversion-functions.asciidoc index eb19f1968ebde..bbfaef73131b8 100644 --- a/docs/reference/esql/functions/type-conversion-functions.asciidoc +++ b/docs/reference/esql/functions/type-conversion-functions.asciidoc @@ -8,6 +8,8 @@ {esql} supports these type conversion functions: // tag::type_list[] +* <> +* <> * <> * <> * <> @@ -25,6 +27,8 @@ * <> // end::type_list[] +include::layout/from_base64.asciidoc[] +include::layout/to_base64.asciidoc[] include::to_boolean.asciidoc[] include::to_cartesianpoint.asciidoc[] include::to_cartesianshape.asciidoc[] diff --git a/docs/reference/esql/functions/types/from_base64.asciidoc b/docs/reference/esql/functions/types/from_base64.asciidoc new file mode 100644 index 0000000000000..1ba0e98ec8f09 --- /dev/null +++ b/docs/reference/esql/functions/types/from_base64.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | result +keyword | keyword +text | keyword +|=== diff --git a/docs/reference/esql/functions/types/to_base64.asciidoc b/docs/reference/esql/functions/types/to_base64.asciidoc new file mode 100644 index 0000000000000..1ba0e98ec8f09 --- /dev/null +++ b/docs/reference/esql/functions/types/to_base64.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +string | result +keyword | keyword +text | keyword +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 4c7feb9416e13..591aa3fbcc63f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -26,6 +26,7 @@ synopsis:keyword double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" +"keyword from_base64(string:keyword|text)" "integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" "integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" "keyword left(string:keyword|text, length:integer)" @@ -77,6 +78,7 @@ double pi() "double tan(angle:double|integer|long|unsigned_long)" "double tanh(angle:double|integer|long|unsigned_long)" double tau() +"keyword to_base64(string:keyword|text)" "boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" "boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" "cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" @@ -136,6 +138,7 @@ date_trunc |[interval, date] |["date_period|time_duration" e |null |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[, ] floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. +from_base64 |string |"keyword|text" |A base64 string. greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] @@ -187,6 +190,7 @@ sum |number |"double|integer|long" tan |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. tanh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. tau |null |null |null +to_base64 |string |"keyword|text" |A string. to_bool |field |"boolean|keyword|text|double|long|unsigned_long|integer" |[""] to_boolean |field |"boolean|keyword|text|double|long|unsigned_long|integer" |[""] to_cartesianpo|field |"cartesian_point|keyword|text" |[""] @@ -247,6 +251,7 @@ date_trunc |Rounds down a date to the closest interval. e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. ends_with |Returns a boolean that indicates whether a keyword string ends with another string floor |Round a number down to the nearest integer. +from_base64 |Decode a base64 string. greatest |Returns the maximum value from many columns. least |Returns the minimum value from many columns. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. @@ -298,6 +303,7 @@ sum |The sum of a numeric field. tan |Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. tanh |Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle. 
tau |The ratio of a circle’s circumference to its radius. +to_base64 |Encode a string to a base64 string. to_bool |Converts an input value to a boolean value. to_boolean |Converts an input value to a boolean value. to_cartesianpo|Converts an input value to a point value. @@ -359,6 +365,7 @@ date_trunc |date e |double |null |false |false ends_with |boolean |[false, false] |false |false floor |"double|integer|long|unsigned_long" |false |false |false +from_base64 |keyword |false |false |false greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false left |keyword |[false, false] |false |false @@ -410,6 +417,7 @@ sum |long tan |double |false |false |false tanh |double |false |false |false tau |double |null |false |false +to_base64 |keyword |false |false |false to_bool |boolean |false |false |false to_boolean |boolean |false |false |false to_cartesianpo|cartesian_point |false |false |false @@ -455,5 +463,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -102 | 102 | 102 +104 | 104 | 104 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 7de8f36e48b01..2a9aad06de159 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1320,3 +1320,50 @@ l1:integer | l2:integer 2 | null null | 0 ; + +base64Encode#[skip:-8.13.99,reason:new base64 function added in 8.14] +required_feature: esql.base64_decode_encode + +// tag::to_base64[] +row a = "elastic" +| eval e = to_base64(a) +// end::to_base64[] +; + +// tag::to_base64-result[] +a:keyword | e:keyword +elastic | ZWxhc3RpYw== +// end::to_base64-result[] +; + +base64Decode#[skip:-8.13.99,reason:new base64 function added in 8.14] +required_feature: esql.base64_decode_encode + +// tag::from_base64[] +row a = "ZWxhc3RpYw==" +| eval d = from_base64(a) +// end::from_base64[] +; + +// tag::from_base64-result[] +a:keyword | d:keyword +ZWxhc3RpYw== | elastic +// end::from_base64-result[] +; + +base64EncodeDecodeEmp#[skip:-8.13.99,reason:new base64 function added in 8.14] +required_feature: esql.base64_decode_encode + +from employees +| where emp_no < 10032 and emp_no > 10027 +| eval e = to_base64(first_name), d = from_base64(e) +| keep emp_no, first_name, e, d +| sort emp_no +; + +emp_no:integer | first_name:keyword | e:keyword | d:keyword +10028 | Domenick | RG9tZW5pY2s= | Domenick +10029 | Otmar | T3RtYXI= | Otmar +10030 | null | null | null +10031 | null | null | null +; diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java new file mode 100644 index 0000000000000..5eb0071b2264a --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Evaluator.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.function.Function; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link FromBase64}. + * This class is generated. Do not edit it. + */ +public final class FromBase64Evaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field; + + private final BytesRefBuilder oScratch; + + private final DriverContext driverContext; + + public FromBase64Evaluator(Source source, EvalOperator.ExpressionEvaluator field, + BytesRefBuilder oScratch, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.field = field; + this.oScratch = oScratch; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock fieldBlock = (BytesRefBlock) field.eval(page)) { + BytesRefVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector).asBlock(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef fieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBytesRef(FromBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), oScratch)); + } + return result.build(); + } + } + + public BytesRefVector eval(int positionCount, BytesRefVector fieldVector) { + try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) { + BytesRef fieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBytesRef(FromBase64.process(fieldVector.getBytesRef(p, fieldScratch), oScratch)); + } + return result.build(); + } + } + + @Override + public String toString() { + return "FromBase64Evaluator[" + "field=" + field + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final Function oScratch; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, + Function oScratch) { + this.source = source; + this.field = field; + this.oScratch = 
oScratch; + } + + @Override + public FromBase64Evaluator get(DriverContext context) { + return new FromBase64Evaluator(source, field.get(context), oScratch.apply(context), context); + } + + @Override + public String toString() { + return "FromBase64Evaluator[" + "field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java new file mode 100644 index 0000000000000..785a935e73f39 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Evaluator.java @@ -0,0 +1,131 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import java.util.function.Function; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToBase64}. + * This class is generated. Do not edit it. 
+ */ +public final class ToBase64Evaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator field; + + private final BytesRefBuilder oScratch; + + private final DriverContext driverContext; + + public ToBase64Evaluator(Source source, EvalOperator.ExpressionEvaluator field, + BytesRefBuilder oScratch, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.field = field; + this.oScratch = oScratch; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock fieldBlock = (BytesRefBlock) field.eval(page)) { + BytesRefVector fieldVector = fieldBlock.asVector(); + if (fieldVector == null) { + return eval(page.getPositionCount(), fieldBlock); + } + return eval(page.getPositionCount(), fieldVector); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefBlock fieldBlock) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef fieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (fieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldBlock.getValueCount(p) != 1) { + if (fieldBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBytesRef(ToBase64.process(fieldBlock.getBytesRef(fieldBlock.getFirstValueIndex(p), fieldScratch), oScratch)); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BytesRefBlock eval(int positionCount, BytesRefVector fieldVector) { + try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef fieldScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBytesRef(ToBase64.process(fieldVector.getBytesRef(p, fieldScratch), oScratch)); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "ToBase64Evaluator[" + "field=" + field + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(field); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + private final Function oScratch; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, + Function oScratch) { + this.source = source; + this.field = field; + this.oScratch = oScratch; + } + + @Override + public ToBase64Evaluator get(DriverContext context) { + return new ToBase64Evaluator(source, field.get(context), oScratch.apply(context), context); + } + + @Override + public String toString() { + return "ToBase64Evaluator[" + "field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 9fce2c3ddadd3..9ec0f4514d981 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -21,6 +21,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; @@ -254,6 +256,8 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(CIDRMatch.class, CIDRMatch::new, "cidr_match") }, // conversion functions new FunctionDefinition[] { + def(FromBase64.class, FromBase64::new, "from_base64"), + def(ToBase64.class, ToBase64::new, "to_base64"), def(ToBoolean.class, ToBoolean::new, "to_boolean", "to_bool"), def(ToCartesianPoint.class, ToCartesianPoint::new, "to_cartesianpoint"), def(ToCartesianShape.class, ToCartesianShape::new, "to_cartesianshape"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java new file mode 100644 index 0000000000000..68856d455663b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.Base64; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; + +public class FromBase64 extends UnaryScalarFunction { + + @FunctionInfo( + returnType = "keyword", + description = "Decode a base64 string.", + examples = @Example(file = "string", tag = "from_base64") + ) + public FromBase64( + Source source, + @Param(name = "string", type = { "keyword", "text" }, description = "A base64 string.") Expression string + ) { + super(source, string); + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + return isString(field, sourceText(), TypeResolutions.ParamOrdinal.DEFAULT); + } + + @Override + public DataType dataType() { + return KEYWORD; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new FromBase64(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, FromBase64::new, field()); + } + + @Evaluator() + static BytesRef process(BytesRef field, @Fixed(includeInToString = false, build = true) BytesRefBuilder oScratch) { + byte[] bytes = new byte[field.length]; + System.arraycopy(field.bytes, field.offset, bytes, 0, field.length); + oScratch.grow(field.length); + oScratch.clear(); + int decodedSize = Base64.getDecoder().decode(bytes, oScratch.bytes()); + return new BytesRef(oScratch.bytes(), 0, decodedSize); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return switch (PlannerUtils.toElementType(field.dataType())) { + case BYTES_REF -> new FromBase64Evaluator.Factory(source(), toEvaluator.apply(field), context -> new BytesRefBuilder()); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java new file mode 100644 index 0000000000000..df21620df7e71 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.Base64; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; + +public class ToBase64 extends UnaryScalarFunction { + + @FunctionInfo( + returnType = "keyword", + description = "Encode a string to a base64 string.", + examples = @Example(file = "string", tag = "to_base64") + ) + public ToBase64(Source source, @Param(name = "string", type = { "keyword", "text" }, description = "A string.") Expression string) { + super(source, string); + + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + return isString(field, sourceText(), TypeResolutions.ParamOrdinal.DEFAULT); + } + + @Override + public DataType dataType() { + return KEYWORD; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToBase64(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToBase64::new, field); + } + + @Evaluator(warnExceptions = { ArithmeticException.class }) + static BytesRef process(BytesRef field, @Fixed(includeInToString = false, build = true) BytesRefBuilder oScratch) { + int outLength = Math.multiplyExact(4, (Math.addExact(field.length, 2) / 3)); + byte[] bytes = new byte[field.length]; + System.arraycopy(field.bytes, field.offset, bytes, 0, field.length); + oScratch.grow(outLength); + oScratch.clear(); + int encodedSize = Base64.getEncoder().encode(bytes, oScratch.bytes()); + return new BytesRef(oScratch.bytes(), 0, encodedSize); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return switch (PlannerUtils.toElementType(field.dataType())) { + case BYTES_REF -> new ToBase64Evaluator.Factory(source(), toEvaluator.apply(field), context -> new BytesRefBuilder()); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 
a0fecd731c71c..7e58a5b3fc8cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -42,6 +42,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; @@ -348,6 +350,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Cos.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Cosh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Floor.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, FromBase64.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Length.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Log10.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, LTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -360,6 +363,7 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, StY.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Tan.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Tanh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToBase64.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToBoolean.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToCartesianPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDatetime.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -1297,6 +1301,7 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(Cos.class), Cos::new), entry(name(Cosh.class), Cosh::new), entry(name(Floor.class), Floor::new), + entry(name(FromBase64.class), FromBase64::new), entry(name(Length.class), Length::new), entry(name(Log10.class), Log10::new), entry(name(LTrim.class), LTrim::new), @@ -1310,6 +1315,7 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(StY.class), StY::new), entry(name(Tan.class), Tan::new), entry(name(Tanh.class), Tanh::new), + entry(name(ToBase64.class), ToBase64::new), entry(name(ToBoolean.class), ToBoolean::new), entry(name(ToCartesianPoint.class), ToCartesianPoint::new), entry(name(ToDatetime.class), ToDatetime::new), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 93c0a5946cdf5..28fa34fa2338d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -105,11 +105,17 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature STRING_LITERAL_AUTO_CASTING = new NodeFeature("esql.string_literal_auto_casting"); + /** + * Base64 encoding and decoding functions. + */ + public static final NodeFeature BASE64_DECODE_ENCODE = new NodeFeature("esql.base64_decode_encode"); + @Override public Set getFeatures() { return Set.of( ASYNC_QUERY, AGG_VALUES, + BASE64_DECODE_ENCODE, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java new file mode 100644 index 0000000000000..214b93f68e7d6 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +@FunctionName("from_base64") +public class FromBase64Tests extends AbstractFunctionTestCase { + public FromBase64Tests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD), () -> { + BytesRef input = new BytesRef(randomAlphaOfLength(6)); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(input, DataTypes.KEYWORD, "string")), + "FromBase64Evaluator[field=Attribute[channel=0]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(Base64.getDecoder().decode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT), () -> { + BytesRef input = new BytesRef(randomAlphaOfLength(54)); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(input, DataTypes.TEXT, "string")), + "FromBase64Evaluator[field=Attribute[channel=0]]", + DataTypes.KEYWORD, + equalTo(new 
BytesRef(Base64.getDecoder().decode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) + ); + })); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new FromBase64(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java new file mode 100644 index 0000000000000..dc3b8aff80c61 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +@FunctionName("to_base64") +public class ToBase64Tests extends AbstractFunctionTestCase { + public ToBase64Tests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + suppliers.add(new TestCaseSupplier(List.of(DataTypes.KEYWORD), () -> { + BytesRef input = (BytesRef) randomLiteral(DataTypes.KEYWORD).value(); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(input, DataTypes.KEYWORD, "string")), + "ToBase64Evaluator[field=Attribute[channel=0]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(Base64.getEncoder().encode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) + ); + })); + + suppliers.add(new TestCaseSupplier(List.of(DataTypes.TEXT), () -> { + BytesRef input = (BytesRef) randomLiteral(DataTypes.TEXT).value(); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(input, DataTypes.TEXT, "string")), + "ToBase64Evaluator[field=Attribute[channel=0]]", + DataTypes.KEYWORD, + equalTo(new BytesRef(Base64.getEncoder().encode(input.utf8ToString().getBytes(StandardCharsets.UTF_8)))) + ); + })); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToBase64(source, args.get(0)); + } +} From 8f2a615cb6ce99eaf07bba31580bceca43156ca7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 15 Apr 2024 19:39:56 -0400 Subject: [PATCH 032/130] Fix ESQL's DivTests (#107492) Sometimes ESQL's Div 
test generates random division that'll produce an out of range number. They should produce a warning and a `null`, but the tests still thought they should make `Infinity`. This flips the tests around to expecting the right thing in that case. Closes #107475 --- .../function/AbstractFunctionTestCase.java | 2 -- .../expression/function/TestCaseSupplier.java | 14 ++++----- .../operator/arithmetic/AddTests.java | 2 +- .../operator/arithmetic/DivTests.java | 29 ++++++++++++++----- .../operator/arithmetic/ModTests.java | 4 +-- 5 files changed, 31 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 4cecec102799e..0b6c64679dc1f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -312,7 +312,6 @@ public final void testEvaluateBlockWithoutNulls() { * Evaluates a {@link Block} of values, all copied from the input pattern with * some null values inserted between. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107475") public final void testEvaluateBlockWithNulls() { testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); } @@ -496,7 +495,6 @@ public final void testFactoryToString() { assertThat(factory.toString(), testCase.evaluatorToString()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107475") public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index de9d984d0222d..f79816daf690d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -186,7 +186,7 @@ public static List forBinaryCastingToDouble( + "=" + castToDoubleEvaluator("Attribute[channel=1]", rhsType) + "]", - warnings, + (lhs, rhs) -> warnings, suppliers, DataTypes.DOUBLE, false @@ -199,7 +199,7 @@ public static void casesCrossProduct( List lhsSuppliers, List rhsSuppliers, BiFunction evaluatorToString, - List warnings, + BiFunction> warnings, List suppliers, DataType expectedType, boolean symmetric @@ -221,7 +221,7 @@ public static TestCaseSupplier testCaseSupplier( DataType expectedType, BinaryOperator expectedValue ) { - return testCaseSupplier(lhsSupplier, rhsSupplier, evaluatorToString, expectedType, expectedValue, List.of()); + return testCaseSupplier(lhsSupplier, rhsSupplier, evaluatorToString, expectedType, expectedValue, (lhs, rhs) -> List.of()); } private static TestCaseSupplier testCaseSupplier( @@ -230,7 +230,7 @@ private static TestCaseSupplier testCaseSupplier( BiFunction evaluatorToString, DataType expectedType, BinaryOperator expectedValue, - List warnings + BiFunction> warnings ) { String caseName = lhsSupplier.name() + ", " + rhsSupplier.name(); return new TestCaseSupplier(caseName, List.of(lhsSupplier.type(), rhsSupplier.type()), () -> { @@ -242,7 +242,7 @@ private 
static TestCaseSupplier testCaseSupplier( expectedType, equalTo(expectedValue.apply(lhsTyped.getValue(), rhsTyped.getValue())) ); - for (String warning : warnings) { + for (String warning : warnings.apply(lhsTyped, rhsTyped)) { testCase = testCase.withWarning(warning); } return testCase; @@ -316,7 +316,7 @@ public static List forBinaryWithWidening( NumericTypeTestConfigs typeStuff, String lhsName, String rhsName, - List warnings, + BiFunction> warnings, boolean allowRhsZero ) { List suppliers = new ArrayList<>(); @@ -369,7 +369,7 @@ public static List forBinaryNotCasting( lhsSuppliers, rhsSuppliers, (lhsType, rhsType) -> name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]", - warnings, + (lhs, rhs) -> warnings, suppliers, expectedType, symmetric diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 143f7e5aaba9f..c40d037890d53 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -65,7 +65,7 @@ public static Iterable parameters() { ), "lhs", "rhs", - List.of(), + (lhs, rhs) -> List.of(), true ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 1f5d57394ff4d..528324f07a086 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -47,16 +47,29 @@ public static Iterable parameters() { (l, r) -> l.longValue() / r.longValue(), "DivLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - (l, r) -> l.doubleValue() / r.doubleValue(), - "DivDoublesEvaluator" - ) + new TestCaseSupplier.NumericTypeTestConfig(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> { + double v = l.doubleValue() / r.doubleValue(); + if (Double.isFinite(v)) { + return v; + } + return null; + }, "DivDoublesEvaluator") ), "lhs", "rhs", - List.of(), + (lhs, rhs) -> { + if (lhs.type() != DataTypes.DOUBLE || rhs.type() != DataTypes.DOUBLE) { + return List.of(); + } + double v = ((Double) lhs.getValue()) / ((Double) rhs.getValue()); + if (Double.isFinite(v)) { + return List.of(); + } + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: / by zero" + ); + }, false ) ); @@ -118,7 +131,7 @@ public static Iterable parameters() { TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), TestCaseSupplier.getSuppliersForNumericType(rhsType, 0, 0, true), evaluatorToString, - List.of( + (lhs, rhs) -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", "Line -1:-1: java.lang.ArithmeticException: / by zero" ), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index 03fbbf6a21ebe..d2af83e91ec64 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -56,7 +56,7 @@ public static Iterable parameters() { ), "lhs", "rhs", - List.of(), + (lhs, rhs) -> List.of(), false ) ); @@ -118,7 +118,7 @@ public static Iterable parameters() { TestCaseSupplier.getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), true), TestCaseSupplier.getSuppliersForNumericType(rhsType, 0, 0, true), evaluatorToString, - List.of( + (lhs, rhs) -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", "Line -1:-1: java.lang.ArithmeticException: / by zero" ), From 2e66a8703b6e81536a0aedf2803a4d6320e68f35 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 16 Apr 2024 11:59:21 +1000 Subject: [PATCH 033/130] Add ability to set project-id in test client (#107341) This adds a "tests.rest.project.id" system property that causes the default test rest client to pass the "X-Elastic-Project-Id" header in HTTP requests --- .../java/org/elasticsearch/test/rest/ESRestTestCase.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 1afedeb37114a..fba04181d5e79 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1452,7 +1452,11 @@ protected Settings restClientSettings() { String username = System.getProperty("tests.rest.cluster.username"); String password = System.getProperty("tests.rest.cluster.password"); String token = basicAuthHeaderValue(username, new SecureString(password.toCharArray())); - return builder.put(ThreadContext.PREFIX + ".Authorization", token).build(); + builder.put(ThreadContext.PREFIX + ".Authorization", token); + } + if (System.getProperty("tests.rest.project.id") != null) { + final var projectId = System.getProperty("tests.rest.project.id"); + builder.put(ThreadContext.PREFIX + ".X-Elastic-Project-Id", projectId); } return builder.build(); } From 08a2f7991e214f2ac8e8b274ac51a540c30b6f3c Mon Sep 17 00:00:00 2001 From: George Katiforis Date: Tue, 16 Apr 2024 09:38:06 +0300 Subject: [PATCH 034/130] Fix regression in get index settings (human=true) where the version was not displayed in human-readable format (#107447) --- docs/changelog/107447.yaml | 5 +++++ .../org/elasticsearch/cluster/metadata/IndexMetadata.java | 2 +- .../cluster/metadata/HumanReadableIndexSettingsTests.java | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107447.yaml diff --git a/docs/changelog/107447.yaml b/docs/changelog/107447.yaml new file mode 100644 index 0000000000000..6ace513013e3e --- /dev/null +++ b/docs/changelog/107447.yaml @@ -0,0 +1,5 @@ +pr: 107447 +summary: "Fix regression in get index settings (human=true) where the version was not 
displayed in human-readable format" +area: Infra/Core +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 529814e83ba38..560c6815d2252 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -2752,7 +2752,7 @@ public static Settings addHumanReadableSettings(Settings settings) { Settings.Builder builder = Settings.builder().put(settings); IndexVersion version = SETTING_INDEX_VERSION_CREATED.get(settings); if (version.equals(IndexVersions.ZERO) == false) { - builder.put(SETTING_VERSION_CREATED_STRING, version.toString()); + builder.put(SETTING_VERSION_CREATED_STRING, version.toReleaseVersion()); } Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null); if (creationDate != null) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java index 7d2810f11a167..6598bd709c881 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java @@ -30,7 +30,7 @@ public void testHumanReadableSettings() { Settings humanSettings = IndexMetadata.addHumanReadableSettings(testSettings); assertThat(humanSettings.size(), equalTo(4)); - assertEquals(versionCreated.toString(), humanSettings.get(IndexMetadata.SETTING_VERSION_CREATED_STRING, null)); + assertEquals(versionCreated.toReleaseVersion(), humanSettings.get(IndexMetadata.SETTING_VERSION_CREATED_STRING, null)); ZonedDateTime creationDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(created), ZoneOffset.UTC); assertEquals(creationDate.toString(), humanSettings.get(IndexMetadata.SETTING_CREATION_DATE_STRING, null)); } From a11562c146e780b92f52244ec4a32b42bce34627 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 16 Apr 2024 07:54:51 +0100 Subject: [PATCH 035/130] Document behaviour of `SubscribableListener` on rejection (#107485) It's possible that the `executor` passed in to a `SubscribableListener` rejects the task it should execute. This commit adds to the Javadocs a description of what happens if so. --- .../action/support/SubscribableListener.java | 9 ++++++ .../support/SubscribableListenerTests.java | 32 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index adebcfb4e12cb..1dec470f8c140 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -191,6 +191,10 @@ public final void addListener(ActionListener listener) { *
 * <ul>
 * <li>Pass the desired executor in as {@code executor}, and</li>
 * <li>Invoke {@link #addListener} using that executor.</li>
 * </ul>
+ * <p>
    + * If {@code executor} rejects the execution of the completion of the subscribing listener then the result is + * discarded and the subscribing listener is completed with a rejection exception on the thread which completes + * this listener. * @param threadContext If not {@code null}, and the subscribing listener is not completed immediately, then it will be completed in * the given thread context. If {@code null}, and the subscribing listener is not completed immediately, then it * will be completed in the {@link ThreadContext} of the completing thread. If the subscribing listener is @@ -451,6 +455,11 @@ public SubscribableListener andThen(CheckedBiConsumer, *

 * <ul>
 * <li>Pass the desired executor in as {@code executor}, and</li>
 * <li>Invoke {@link #andThen} using that executor.</li>
 * </ul>
+ * <p>
    + * If {@code executor} rejects the execution of {@code nextStep} then the result is discarded and the returned listener is completed + * with a rejection exception on the thread which completes this listener. Likewise if this listener is completed exceptionally but + * {@code executor} rejects the execution of the completion of the returned listener then the returned listener is completed with a + * rejection exception on the thread which completes this listener. */ public SubscribableListener andThen( Executor executor, diff --git a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java index d4a11af0f466f..808d539646550 100644 --- a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java @@ -12,8 +12,10 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -25,6 +27,7 @@ import java.util.HashMap; import java.util.List; import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -607,6 +610,35 @@ public void testAndThenAcceptFailure() { assertComplete(chainedListener, "simulated"); } + public void testRejectedExecutionThreading() { + final var initialListener = new SubscribableListener<>(); + + final Executor rejectingExecutor = r -> asInstanceOf(AbstractRunnable.class, r).onRejection( + new EsRejectedExecutionException("simulated rejection", randomBoolean()) + ); + + final var subscribedListener = SubscribableListener.newForked(l -> initialListener.addListener(l, rejectingExecutor, null)); + final var andThenListener = initialListener.andThen(rejectingExecutor, null, (l, x) -> fail("should not be called")); + + assertFalse(subscribedListener.isDone()); + assertFalse(andThenListener.isDone()); + + // It doesn't matter whether we complete initialListener successfully or exceptionally: either way the completion of the subscribed + // listeners will try and use rejectingExecutor, be rejected, and therefore turn into an onFailure(EsRejectedExecutionException) + // call on this thread instead. 
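+        // Randomly complete with either a response or a failure; the assertions below are the same for both paths.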
+ if (randomBoolean()) { + initialListener.onResponse(new Object()); + } else { + initialListener.onFailure(new ElasticsearchException("test")); + } + + assertTrue(subscribedListener.isDone()); + assertTrue(andThenListener.isDone()); + + assertEquals("simulated rejection", expectThrows(EsRejectedExecutionException.class, subscribedListener::rawResult).getMessage()); + assertEquals("simulated rejection", expectThrows(EsRejectedExecutionException.class, andThenListener::rawResult).getMessage()); + } + public void testJavaDocExample() { // Not really testing anything meaningful, this is just here to make sure that the example in the JavaDocs for SubscribableListener // actually compiles and at least vaguely makes sense. From d851c93f763c62296232fe6b59a20339481d1013 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 16 Apr 2024 09:11:24 +0200 Subject: [PATCH 036/130] Invalidating cross cluster API keys requires `manage_security` (#107411) This PR updates the privilege model to require `manage_security` cluster privilege to invalidate cross cluster API keys, to better match the access requirements of the creation and update APIs. Requests made with lower privileges will receive descriptive errors in the response payload indicating failure to invalidate, for each cross cluster API key. There are no changes to invalidating REST API keys, nor to the Query or Get APIs. --- docs/changelog/107411.yaml | 5 + .../xpack/security/apikey/ApiKeyRestIT.java | 184 ++++++++++++++++-- .../TransportInvalidateApiKeyAction.java | 64 ++++-- .../xpack/security/authc/ApiKeyService.java | 66 +++++-- .../security/authc/ApiKeyServiceTests.java | 101 +++++++++- 5 files changed, 379 insertions(+), 41 deletions(-) create mode 100644 docs/changelog/107411.yaml diff --git a/docs/changelog/107411.yaml b/docs/changelog/107411.yaml new file mode 100644 index 0000000000000..fda040bcdab80 --- /dev/null +++ b/docs/changelog/107411.yaml @@ -0,0 +1,5 @@ +pr: 107411 +summary: Invalidating cross cluster API keys requires `manage_security` +area: Security +type: enhancement +issues: [] diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 9c22a6bb4d210..d8e6bc21fb4ed 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -42,6 +42,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE; import static org.elasticsearch.test.SecuritySettingsSourceField.ES_TEST_ROOT_ROLE_DESCRIPTOR; @@ -971,16 +973,21 @@ public void testCreateCrossClusterApiKey() throws IOException { }""", false))); assertThat(fetchResponse.evaluate("api_keys.0.limited_by"), nullValue()); - final Request deleteRequest = new Request("DELETE", "/_security/api_key"); - deleteRequest.setJsonEntity(Strings.format(""" - {"ids": ["%s"]}""", apiKeyId)); - if (randomBoolean()) { - setUserForRequest(deleteRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); - } else { - setUserForRequest(deleteRequest, MANAGE_API_KEY_USER, END_USER_PASSWORD); + // Cannot invalidate cross cluster API keys with 
manage_api_key + { + final ObjectPath deleteResponse = invalidateApiKeys(MANAGE_API_KEY_USER, apiKeyId); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder(containsString("Cannot invalidate cross-cluster API key [" + apiKeyId + "]")) + ); + } + + { + final ObjectPath deleteResponse = invalidateApiKeys(MANAGE_SECURITY_USER, apiKeyId); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), equalTo(List.of(apiKeyId))); + assertThat(deleteResponse.evaluate("error_count"), equalTo(0)); } - final ObjectPath deleteResponse = assertOKAndCreateObjectPath(client().performRequest(deleteRequest)); - assertThat(deleteResponse.evaluate("invalidated_api_keys"), equalTo(List.of(apiKeyId))); // Cannot create cross-cluster API keys with either manage_api_key or manage_own_api_key privilege if (randomBoolean()) { @@ -993,6 +1000,157 @@ public void testCreateCrossClusterApiKey() throws IOException { assertThat(e.getMessage(), containsString("action [cluster:admin/xpack/security/cross_cluster/api_key/create] is unauthorized")); } + public void testInvalidateCrossClusterApiKeys() throws IOException { + final String id1 = createCrossClusterApiKey(MANAGE_SECURITY_USER); + final String id2 = createCrossClusterApiKey(MANAGE_SECURITY_USER); + final String id3 = createApiKey(MANAGE_API_KEY_USER, "rest-api-key-1", Map.of()).id(); + final String id4 = createApiKey(MANAGE_API_KEY_USER, "rest-api-key-2", Map.of()).id(); + + // `manage_api_key` user cannot delete cross cluster API keys + { + final ObjectPath deleteResponse = invalidateApiKeys(MANAGE_API_KEY_USER, id1, id2); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), is(empty())); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder( + containsString("Cannot invalidate cross-cluster API key [" + id1 + "]"), + containsString("Cannot invalidate cross-cluster API key [" + id2 + "]") + ) + ); + } + + // `manage_api_key` user can delete REST API keys, in mixed request + { + final ObjectPath deleteResponse = invalidateApiKeys(MANAGE_API_KEY_USER, id1, id2, id3); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), contains(id3)); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder( + containsString("Cannot invalidate cross-cluster API key [" + id1 + "]"), + containsString("Cannot invalidate cross-cluster API key [" + id2 + "]") + ) + ); + } + + // `manage_security` user can delete both cross-cluster and REST API keys + { + final ObjectPath deleteResponse = invalidateApiKeys(MANAGE_SECURITY_USER, id1, id2, id4); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), containsInAnyOrder(id1, id2, id4)); + assertThat(deleteResponse.evaluate("error_count"), equalTo(0)); + } + + // owner that loses `manage_security` cannot invalidate cross cluster API key anymore + { + final String user = "temp_manage_security_user"; + createUser(user, END_USER_PASSWORD, List.of("temp_manage_security_role")); + createRole("temp_manage_security_role", Set.of("manage_security")); + final String apiKeyId = createCrossClusterApiKey(user); + + // createRole can also be used to update + createRole("temp_manage_security_role", Set.of("manage_api_key")); + + { + final ObjectPath deleteResponse = invalidateApiKeys(user, apiKeyId); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), is(empty())); + final List> errors = 
deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder(containsString("Cannot invalidate cross-cluster API key [" + apiKeyId + "]")) + ); + } + + // also test other invalidation options, e.g., username and realm_name + { + final ObjectPath deleteResponse = invalidateApiKeysWithPayload(user, """ + {"username": "temp_manage_security_user"}"""); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), is(empty())); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder(containsString("Cannot invalidate cross-cluster API key [" + apiKeyId + "]")) + ); + } + + { + final ObjectPath deleteResponse = invalidateApiKeysWithPayload(user, """ + {"realm_name": "default_native"}"""); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), is(empty())); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder(containsString("Cannot invalidate cross-cluster API key [" + apiKeyId + "]")) + ); + } + + { + final ObjectPath deleteResponse = invalidateApiKeysWithPayload(user, """ + {"owner": "true"}"""); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), is(empty())); + final List> errors = deleteResponse.evaluate("error_details"); + assertThat( + getErrorReasons(errors), + containsInAnyOrder(containsString("Cannot invalidate cross-cluster API key [" + apiKeyId + "]")) + ); + } + + { + final ObjectPath deleteResponse = invalidateApiKeysWithPayload(MANAGE_SECURITY_USER, randomFrom(""" + {"username": "temp_manage_security_user"}""", """ + {"realm_name": "default_native"}""", """ + {"realm_name": "default_native", "username": "temp_manage_security_user"}""")); + assertThat(deleteResponse.evaluate("invalidated_api_keys"), containsInAnyOrder(apiKeyId)); + assertThat(deleteResponse.evaluate("error_count"), equalTo(0)); + } + + deleteUser(user); + deleteRole("temp_manage_security_role"); + } + } + + private ObjectPath invalidateApiKeys(String user, String... 
ids) throws IOException { + return invalidateApiKeysWithPayload(user, Strings.format(""" + {"ids": [%s]}""", Stream.of(ids).map(s -> "\"" + s + "\"").collect(Collectors.joining(",")))); + } + + private ObjectPath invalidateApiKeysWithPayload(String user, String payload) throws IOException { + final Request deleteRequest = new Request("DELETE", "/_security/api_key"); + deleteRequest.setJsonEntity(payload); + setUserForRequest(deleteRequest, user, END_USER_PASSWORD); + return assertOKAndCreateObjectPath(client().performRequest(deleteRequest)); + } + + private static List getErrorReasons(List> errors) { + return errors.stream().map(e -> { + try { + return (String) ObjectPath.evaluate(e, "reason"); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }).collect(Collectors.toList()); + } + + private String createCrossClusterApiKey(String user) throws IOException { + final Request createRequest = new Request("POST", "/_security/cross_cluster/api_key"); + createRequest.setJsonEntity(""" + { + "name": "my-key", + "access": { + "search": [ + { + "names": [ "metrics" ], + "query": "{\\"term\\":{\\"score\\":42}}" + } + ] + } + }"""); + setUserForRequest(createRequest, user, END_USER_PASSWORD); + + final ObjectPath createResponse = assertOKAndCreateObjectPath(client().performRequest(createRequest)); + return createResponse.evaluate("id"); + } + public void testCannotCreateDerivedCrossClusterApiKey() throws IOException { final Request createRestApiKeyRequest = new Request("POST", "_security/api_key"); setUserForRequest(createRestApiKeyRequest, MANAGE_SECURITY_USER, END_USER_PASSWORD); @@ -1751,13 +1909,17 @@ private Response performRequestUsingRandomAuthMethod(final Request request) thro } private EncodedApiKey createApiKey(final String apiKeyName, final Map metadata) throws IOException { + return createApiKey(MANAGE_OWN_API_KEY_USER, apiKeyName, metadata); + } + + private EncodedApiKey createApiKey(final String username, final String apiKeyName, final Map metadata) + throws IOException { final Map createApiKeyRequestBody = Map.of("name", apiKeyName, "metadata", metadata); final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(XContentTestUtils.convertToXContent(createApiKeyRequestBody, XContentType.JSON).utf8ToString()); createApiKeyRequest.setOptions( - RequestOptions.DEFAULT.toBuilder() - .addHeader("Authorization", headerFromRandomAuthMethod(MANAGE_OWN_API_KEY_USER, END_USER_PASSWORD)) + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(username, END_USER_PASSWORD)) ); final Response createApiKeyResponse = client().performRequest(createApiKeyRequest); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportInvalidateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportInvalidateApiKeyAction.java index cd3360c8ab5c2..8b938fed34d56 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportInvalidateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportInvalidateApiKeyAction.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -20,52 +20,88 @@ import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.security.authc.ApiKeyService; public final class TransportInvalidateApiKeyAction extends HandledTransportAction { private final ApiKeyService apiKeyService; private final SecurityContext securityContext; + private final Client client; @Inject public TransportInvalidateApiKeyAction( TransportService transportService, ActionFilters actionFilters, ApiKeyService apiKeyService, - SecurityContext context + SecurityContext context, + Client client ) { super( InvalidateApiKeyAction.NAME, transportService, actionFilters, - (Writeable.Reader) InvalidateApiKeyRequest::new, + InvalidateApiKeyRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.apiKeyService = apiKeyService; this.securityContext = context; + this.client = client; } @Override protected void doExecute(Task task, InvalidateApiKeyRequest request, ActionListener listener) { - String[] apiKeyIds = request.getIds(); - String apiKeyName = request.getName(); - String username = request.getUserName(); - String[] realms = Strings.hasText(request.getRealmName()) ? new String[] { request.getRealmName() } : null; - final Authentication authentication = securityContext.getAuthentication(); if (authentication == null) { listener.onFailure(new IllegalStateException("authentication is required")); + return; } + final String[] apiKeyIds = request.getIds(); + final String apiKeyName = request.getName(); + final String username = getUsername(authentication, request); + final String[] realms = getRealms(authentication, request); + checkHasManageSecurityPrivilege( + ActionListener.wrap( + hasPrivilegesResponse -> apiKeyService.invalidateApiKeys( + realms, + username, + apiKeyName, + apiKeyIds, + hasPrivilegesResponse.isCompleteMatch(), + listener + ), + listener::onFailure + ) + ); + } + + private String getUsername(Authentication authentication, InvalidateApiKeyRequest request) { if (request.ownedByAuthenticatedUser()) { - assert username == null; - assert realms == null; - // restrict username and realm to current authenticated user. 
- username = authentication.getEffectiveSubject().getUser().principal(); - realms = ApiKeyService.getOwnersRealmNames(authentication); + assert request.getUserName() == null; + return authentication.getEffectiveSubject().getUser().principal(); } + return request.getUserName(); + } - apiKeyService.invalidateApiKeys(realms, username, apiKeyName, apiKeyIds, listener); + private String[] getRealms(Authentication authentication, InvalidateApiKeyRequest request) { + if (request.ownedByAuthenticatedUser()) { + assert request.getRealmName() == null; + return ApiKeyService.getOwnersRealmNames(authentication); + } + return Strings.hasText(request.getRealmName()) ? new String[] { request.getRealmName() } : null; } + private void checkHasManageSecurityPrivilege(ActionListener listener) { + final var hasPrivilegesRequest = new HasPrivilegesRequest(); + hasPrivilegesRequest.username(securityContext.getUser().principal()); + hasPrivilegesRequest.clusterPrivileges(ClusterPrivilegeResolver.MANAGE_SECURITY.name()); + hasPrivilegesRequest.indexPrivileges(new RoleDescriptor.IndicesPrivileges[0]); + hasPrivilegesRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); + client.execute(HasPrivilegesAction.INSTANCE, hasPrivilegesRequest, ActionListener.wrap(listener::onResponse, listener::onFailure)); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index ec0e54e96f1af..ffacd72b05abf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -32,7 +32,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -99,6 +99,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmDomain; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleReference; import org.elasticsearch.xpack.core.security.support.MetadataUtils; @@ -121,6 +122,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -1517,9 +1519,11 @@ private static void addErrorsForNotFoundApiKeys( /** * Invalidate API keys for given realm, user name, API key name and id. * @param realmNames realm names - * @param username user name + * @param username username * @param apiKeyName API key name * @param apiKeyIds API key ids + * @param includeCrossClusterApiKeys whether to include cross-cluster api keys in the invalidation; if false any cross-cluster api keys + * will be skipped. 
skipped API keys will be included in the error details of the response * @param invalidateListener listener for {@link InvalidateApiKeyResponse} */ public void invalidateApiKeys( @@ -1527,6 +1531,7 @@ public void invalidateApiKeys( String username, String apiKeyName, String[] apiKeyIds, + boolean includeCrossClusterApiKeys, ActionListener invalidateListener ) { ensureEnabled(); @@ -1546,7 +1551,6 @@ public void invalidateApiKeys( apiKeyIds, true, false, - // TODO: instead of parsing the entire API key document, we can just convert the hit to the API key ID this::convertSearchHitToApiKeyInfo, ActionListener.wrap(apiKeys -> { if (apiKeys.isEmpty()) { @@ -1559,7 +1563,7 @@ public void invalidateApiKeys( ); invalidateListener.onResponse(InvalidateApiKeyResponse.emptyResponse()); } else { - indexInvalidation(apiKeys.stream().map(ApiKey::getId).collect(Collectors.toSet()), invalidateListener); + indexInvalidation(apiKeys, includeCrossClusterApiKeys, invalidateListener); } }, invalidateListener::onFailure) ); @@ -1674,22 +1678,47 @@ private void findApiKeysForUserRealmApiKeyIdAndNameCombination( /** * Performs the actual invalidation of a collection of api keys * - * @param apiKeyIds the api keys to invalidate + * @param apiKeys the api keys to invalidate + * @param includeCrossClusterApiKeys whether to include cross-cluster api keys in the invalidation; if false any cross-cluster api keys + * will be skipped. skipped API keys will be included in the error details of the response * @param listener the listener to notify upon completion */ - private void indexInvalidation(Collection apiKeyIds, ActionListener listener) { + private void indexInvalidation( + Collection apiKeys, + boolean includeCrossClusterApiKeys, + ActionListener listener + ) { maybeStartApiKeyRemover(); - if (apiKeyIds.isEmpty()) { + if (apiKeys.isEmpty()) { listener.onFailure(new ElasticsearchSecurityException("No api key ids provided for invalidation")); } else { - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); final long invalidationTime = clock.instant().toEpochMilli(); - for (String apiKeyId : apiKeyIds) { - UpdateRequest request = client.prepareUpdate(SECURITY_MAIN_ALIAS, apiKeyId) - .setDoc(Map.of("api_key_invalidated", true, "invalidation_time", invalidationTime)) - .request(); - bulkRequestBuilder.add(request); + final Set apiKeyIdsToInvalidate = new HashSet<>(); + final Set crossClusterApiKeyIdsToSkip = new HashSet<>(); + final ArrayList failedRequestResponses = new ArrayList<>(); + for (ApiKey apiKey : apiKeys) { + final String apiKeyId = apiKey.getId(); + if (apiKeyIdsToInvalidate.contains(apiKeyId) || crossClusterApiKeyIdsToSkip.contains(apiKeyId)) { + continue; + } + if (false == includeCrossClusterApiKeys && ApiKey.Type.CROSS_CLUSTER.equals(apiKey.getType())) { + logger.debug("Skipping invalidation of cross cluster API key [{}]", apiKey); + failedRequestResponses.add(cannotInvalidateCrossClusterApiKeyException(apiKeyId)); + crossClusterApiKeyIdsToSkip.add(apiKeyId); + } else { + final UpdateRequestBuilder updateRequestBuilder = client.prepareUpdate(SECURITY_MAIN_ALIAS, apiKeyId) + .setDoc(Map.of("api_key_invalidated", true, "invalidation_time", invalidationTime)); + bulkRequestBuilder.add(updateRequestBuilder); + apiKeyIdsToInvalidate.add(apiKeyId); + } } + assert false == apiKeyIdsToInvalidate.isEmpty() || false == crossClusterApiKeyIdsToSkip.isEmpty(); + if (apiKeyIdsToInvalidate.isEmpty()) { + listener.onResponse(new 
InvalidateApiKeyResponse(Collections.emptyList(), Collections.emptyList(), failedRequestResponses)); + return; + } + assert bulkRequestBuilder.numberOfActions() > 0; bulkRequestBuilder.setRefreshPolicy(defaultCreateDocRefreshPolicy(settings)); securityIndex.prepareIndexIfNeededThenExecute( ex -> listener.onFailure(traceLog("prepare security index", ex)), @@ -1698,7 +1727,6 @@ private void indexInvalidation(Collection apiKeyIds, ActionListenerwrap(bulkResponse -> { - ArrayList failedRequestResponses = new ArrayList<>(); ArrayList previouslyInvalidated = new ArrayList<>(); ArrayList invalidated = new ArrayList<>(); for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { @@ -1734,6 +1762,16 @@ private void indexInvalidation(Collection apiKeyIds, ActionListener listener.get()); @@ -490,7 +491,7 @@ public void testInvalidateApiKeys() throws Exception { username = randomAlphaOfLengthBetween(3, 8); } PlainActionFuture invalidateApiKeyResponseListener = new PlainActionFuture<>(); - service.invalidateApiKeys(realmNames, username, apiKeyName, apiKeyIds, invalidateApiKeyResponseListener); + service.invalidateApiKeys(realmNames, username, apiKeyName, apiKeyIds, true, invalidateApiKeyResponseListener); final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("doc_type", "api_key")); if (realmNames != null && realmNames.length > 0) { if (realmNames.length == 1) { @@ -607,7 +608,103 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { when(clock.instant()).thenReturn(Instant.ofEpochMilli(invalidation)); final ApiKeyService service = createApiKeyService(); PlainActionFuture future = new PlainActionFuture<>(); - service.invalidateApiKeys(null, null, null, new String[] { apiKeyId }, future); + service.invalidateApiKeys(null, null, null, new String[] { apiKeyId }, true, future); + final InvalidateApiKeyResponse invalidateApiKeyResponse = future.actionGet(); + + assertThat(invalidateApiKeyResponse.getInvalidatedApiKeys(), equalTo(List.of(apiKeyId))); + verify(updateRequestBuilder).setDoc( + argThat( + (ArgumentMatcher>) argument -> Map.of("api_key_invalidated", true, "invalidation_time", invalidation) + .equals(argument) + ) + ); + } + + @SuppressWarnings("unchecked") + public void testInvalidateApiKeysWithSkippedCrossClusterKeysAndNullType() { + final int docId = randomIntBetween(0, Integer.MAX_VALUE); + final String apiKeyId = randomAlphaOfLength(20); + + // Mock the search request for keys to invalidate + when(client.threadPool()).thenReturn(threadPool); + when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(new SearchRequestBuilder(client)); + doAnswer(invocation -> { + final var listener = (ActionListener) invocation.getArguments()[1]; + final var searchHit = SearchHit.unpooled(docId, apiKeyId); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + Map apiKeyDocMap = buildApiKeySourceDoc("some_hash".toCharArray()); + // Ensure type is null + apiKeyDocMap.remove("type"); + builder.map(apiKeyDocMap); + searchHit.sourceRef(BytesReference.bytes(builder)); + } + ActionListener.respondAndRelease( + listener, + new SearchResponse( + SearchHits.unpooled( + new SearchHit[] { searchHit }, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + randomFloat(), + null, + null, + null + ), + null, + null, + false, + null, + null, + 0, + randomAlphaOfLengthBetween(3, 8), + 1, + 1, + 0, + 10, + null, + null + ) + ); + return null; + }).when(client).search(any(SearchRequest.class), anyActionListener()); + + // Capture the Update 
request so that we can verify it is configured as expected + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); + final var updateRequestBuilder = Mockito.spy(new UpdateRequestBuilder(client)); + when(client.prepareUpdate(eq(SECURITY_MAIN_ALIAS), eq(apiKeyId))).thenReturn(updateRequestBuilder); + + doAnswer(invocation -> { + final var listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse( + new BulkResponse( + new BulkItemResponse[] { + BulkItemResponse.success( + docId, + DocWriteRequest.OpType.UPDATE, + new UpdateResponse( + mock(ShardId.class), + apiKeyId, + randomLong(), + randomLong(), + randomLong(), + DocWriteResponse.Result.UPDATED + ) + ) }, + randomLongBetween(1, 100) + ) + ); + return null; + }).when(client).bulk(any(BulkRequest.class), anyActionListener()); + doAnswer(invocation -> { + final var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(mock(ClearSecurityCacheResponse.class)); + return null; + }).when(client).execute(eq(ClearSecurityCacheAction.INSTANCE), any(ClearSecurityCacheRequest.class), anyActionListener()); + + final long invalidation = randomMillisUpToYear9999(); + when(clock.instant()).thenReturn(Instant.ofEpochMilli(invalidation)); + final ApiKeyService service = createApiKeyService(); + PlainActionFuture future = new PlainActionFuture<>(); + service.invalidateApiKeys(null, null, null, new String[] { apiKeyId }, false, future); final InvalidateApiKeyResponse invalidateApiKeyResponse = future.actionGet(); assertThat(invalidateApiKeyResponse.getInvalidatedApiKeys(), equalTo(List.of(apiKeyId))); From 2e847e8817c47197a89ba421d89fdc289976b1ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 16 Apr 2024 09:39:36 +0200 Subject: [PATCH 037/130] [DOCS] Documents the rerank task type of the Inference API (#107404) * [DOCS] Documents the rerank task type of the Inference API. --- .../inference/post-inference.asciidoc | 137 ++++++++++++++---- .../inference/put-inference.asciidoc | 97 +++++++++---- 2 files changed, 180 insertions(+), 54 deletions(-) diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 023566d3e40ee..5a9ae283e895c 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -69,27 +69,40 @@ seconds. ==== {api-request-body-title} `input`:: -(Required, array of strings) +(Required, string or array of strings) The text on which you want to perform the {infer} task. `input` can be a single string or an array. ++ +-- [NOTE] ==== -Inference endpoints for the `completion` task type currently only support a single string as input. +Inference endpoints for the `completion` task type currently only support a +single string as input. ==== +-- + +`query`:: +(Required, string) +Only for `rerank` {infer} endpoints. The search query text. [discrete] [[post-inference-api-example]] ==== {api-examples-title} -The following example performs sparse embedding on the example sentence. + +[discrete] +[[inference-example-completion]] +===== Completion example + +The following example performs a completion on the example question. [source,console] ------------------------------------------------------------ -POST _inference/sparse_embedding/my-elser-model +POST _inference/completion/openai_chat_completions { - "input": "The sky above the port was the color of television tuned to a dead channel." + "input": "What is Elastic?" 
} ------------------------------------------------------------ // TEST[skip:TBD] @@ -101,39 +114,90 @@ The API returns the following response: [source,console-result] ------------------------------------------------------------ { - "sparse_embedding": [ + "completion": [ { - "port": 2.1259406, - "sky": 1.7073475, - "color": 1.6922266, - "dead": 1.6247464, - "television": 1.3525393, - "above": 1.2425821, - "tuned": 1.1440028, - "colors": 1.1218185, - "tv": 1.0111054, - "ports": 1.0067928, - "poem": 1.0042328, - "channel": 0.99471164, - "tune": 0.96235967, - "scene": 0.9020516, - (...) - }, - (...) + "result": "Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management." + } ] } ------------------------------------------------------------ // NOTCONSOLE +[discrete] +[[inference-example-rerank]] +===== Rerank example -The next example performs a completion on the example question. +The following example performs reranking on the example input. + +[source,console] +------------------------------------------------------------ +POST _inference/rerank/cohere_rerank +{ + "input": ["luke", "like", "leia", "chewy","r2d2", "star", "wars"], + "query": "star wars main character" +} +------------------------------------------------------------ +// TEST[skip:TBD] + +The API returns the following response: + + +[source,console-result] +------------------------------------------------------------ +{ + "rerank": [ + { + "index": "2", + "relevance_score": "0.011597361", + "text": "leia" + }, + { + "index": "0", + "relevance_score": "0.006338922", + "text": "luke" + }, + { + "index": "5", + "relevance_score": "0.0016166499", + "text": "star" + }, + { + "index": "4", + "relevance_score": "0.0011695103", + "text": "r2d2" + }, + { + "index": "1", + "relevance_score": "5.614787E-4", + "text": "like" + }, + { + "index": "6", + "relevance_score": "3.7850367E-4", + "text": "wars" + }, + { + "index": "3", + "relevance_score": "1.2508839E-5", + "text": "chewy" + } + ] +} +------------------------------------------------------------ + + +[discrete] +[[inference-example-sparse]] +===== Sparse embedding example + +The following example performs sparse embedding on the example sentence. [source,console] ------------------------------------------------------------ -POST _inference/completion/openai_chat_completions +POST _inference/sparse_embedding/my-elser-model { - "input": "What is Elastic?" + "input": "The sky above the port was the color of television tuned to a dead channel." } ------------------------------------------------------------ // TEST[skip:TBD] @@ -145,10 +209,25 @@ The API returns the following response: [source,console-result] ------------------------------------------------------------ { - "completion": [ + "sparse_embedding": [ { - "result": "Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management." - } + "port": 2.1259406, + "sky": 1.7073475, + "color": 1.6922266, + "dead": 1.6247464, + "television": 1.3525393, + "above": 1.2425821, + "tuned": 1.1440028, + "colors": 1.1218185, + "tv": 1.0111054, + "ports": 1.0067928, + "poem": 1.0042328, + "channel": 0.99471164, + "tune": 0.96235967, + "scene": 0.9020516, + (...) + }, + (...) ] } ------------------------------------------------------------ diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 9825e71adea0d..332752e52f068 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -57,24 +57,27 @@ The unique identifier of the {infer} endpoint. ``:: (Required, string) The type of the {infer} task that the model will perform. Available task types: +* `completion`, +* `rerank`, * `sparse_embedding`, -* `text_embedding`, -* `completion` +* `text_embedding`. [discrete] [[put-inference-api-request-body]] -== {api-request-body-title} +==== {api-request-body-title} `service`:: (Required, string) The type of service supported for the specified task type. Available services: -* `cohere`: specify the `text_embedding` task type to use the Cohere service. +* `cohere`: specify the `text_embedding` or the `rerank` task type to use the +Cohere service. * `elser`: specify the `sparse_embedding` task type to use the ELSER service. * `hugging_face`: specify the `text_embedding` task type to use the Hugging Face service. -* `openai`: specify the `text_embedding` task type to use the OpenAI service. +* `openai`: specify the `completion` or `text_embedding` task type to use the +OpenAI service. * `elasticsearch`: specify the `text_embedding` task type to use the E5 built-in model or text embedding models uploaded by Eland. @@ -100,7 +103,8 @@ the same name and the updated API key. `embedding_type`:: (Optional, string) -Specifies the types of embeddings you want to get back. Defaults to `float`. +Only for `text_embedding`. Specifies the types of embeddings you want to get +back. Defaults to `float`. Valid values are: * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). * `float`: use it for the default float embeddings. @@ -108,10 +112,13 @@ Valid values are: `model_id`:: (Optional, string) -The name of the model to use for the {infer} task. To review the available -models, refer to the -https://docs.cohere.com/reference/embed[Cohere docs]. Defaults to -`embed-english-v2.0`. +The name of the model to use for the {infer} task. +To review the availble `rerank` models, refer to the +https://docs.cohere.com/reference/rerank-1[Cohere docs]. + +To review the available `text_embedding` models, refer to the +https://docs.cohere.com/reference/embed[Cohere docs]. The default value for +`text_embedding` is `embed-english-v2.0`. ===== + .`service_settings` for the `elser` service @@ -210,11 +217,34 @@ allocations. Must be a power of 2. Max allowed value is 32. Settings to configure the {infer} task. These settings are specific to the `` you specified. + +.`task_settings` for the `completion` task type +[%collapsible%closed] +===== +`user`::: +(Optional, string) +For `openai` service only. Specifies the user issuing the request, which can be +used for abuse detection. 
+===== ++ +.`task_settings` for the `rerank` task type +[%collapsible%closed] +===== +`return_documents`:: +(Optional, boolean) +For `cohere` service only. Specify whether to return doc text within the +results. + +`top_n`:: +(Optional, integer) +The number of most relevant documents to return, defaults to the number of the +documents. +===== ++ .`task_settings` for the `text_embedding` task type [%collapsible%closed] ===== `input_type`::: -(optional, string) +(Optional, string) For `cohere` service only. Specifies the type of input passed to the model. Valid values are: * `classification`: use it for embeddings passed through a text classifier. @@ -236,15 +266,8 @@ maximum token length. Defaults to `END`. Valid values are: `user`::: (optional, string) -For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. -===== -+ -.`task_settings` for the `completion` task type -[%collapsible%closed] -===== -`user`::: -(optional, string) -For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. +For `openai` service only. Specifies the user issuing the request, which can be +used for abuse detection. ===== @@ -260,7 +283,7 @@ This section contains example API calls for every service type. ===== Cohere service The following example shows how to create an {infer} endpoint called -`cohere_embeddings` to perform a `text_embedding` task type. +`cohere-embeddings` to perform a `text_embedding` task type. [source,console] ------------------------------------------------------------ @@ -277,6 +300,30 @@ PUT _inference/text_embedding/cohere-embeddings // TEST[skip:TBD] +The following example shows how to create an {infer} endpoint called +`cohere-rerank` to perform a `rerank` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/rerank/cohere-rerank +{ + "service": "cohere", + "service_settings": { + "api_key": "", + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + "return_documents": true + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + +For more examples, also review the +https://docs.cohere.com/docs/elasticsearch-and-cohere#rerank-search-results-with-cohere-and-elasticsearch[Cohere documentation]. + + [discrete] [[inference-example-e5]] ===== E5 via the elasticsearch service @@ -414,11 +461,11 @@ been ===== OpenAI service The following example shows how to create an {infer} endpoint called -`openai_embeddings` to perform a `text_embedding` task type. +`openai-embeddings` to perform a `text_embedding` task type. [source,console] ------------------------------------------------------------ -PUT _inference/text_embedding/openai_embeddings +PUT _inference/text_embedding/openai-embeddings { "service": "openai", "service_settings": { @@ -430,11 +477,11 @@ PUT _inference/text_embedding/openai_embeddings // TEST[skip:TBD] The next example shows how to create an {infer} endpoint called -`openai_completion` to perform a `completion` task type. +`openai-completion` to perform a `completion` task type. 
[source,console]
 ------------------------------------------------------------
-PUT _inference/completion/openai_completion
+PUT _inference/completion/openai-completion
 {
     "service": "openai",
     "service_settings": {

From d11048e7ff24ac8f9995a3c0d24da71117ceb7b5 Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Tue, 16 Apr 2024 11:34:18 +0300
Subject: [PATCH 038/130] Mute DockerTests.test150MachineDependentHeap (#107509)

Related to #104786
---
 .../test/java/org/elasticsearch/packaging/test/DockerTests.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
index d0ff4d2d986e6..ece49fbb15e5d 100644
--- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
@@ -1011,6 +1011,7 @@ public void test140CgroupOsStatsAreAvailable() throws Exception {
      * Check that when available system memory is constrained by Docker, the machine-dependent heap sizing
      * logic sets the correct heap size, based on the container limits.
      */
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104786")
     public void test150MachineDependentHeap() throws Exception {
         final List xArgs = machineDependentHeapTest("942m", List.of());

From 4cca544ccb0c12e9c35cd7f1ba49c65d84456ba0 Mon Sep 17 00:00:00 2001
From: Nikolaj Volgushev
Date: Tue, 16 Apr 2024 11:11:38 +0200
Subject: [PATCH 039/130] Custom Roles - extension point for grant API key
 request translation (#107378)

Extension point to override Grant API key request translation (i.e.,
parsing) behavior.
---
 .../apikey/CreateApiKeyRequestBuilder.java    |   2 +-
 .../security/src/main/java/module-info.java   |   1 +
 .../xpack/security/Security.java              |   7 +-
 .../action/apikey/RestGrantApiKeyAction.java  | 133 +++++++++++-------
 .../apikey/RestGrantApiKeyActionTests.java    |   2 +-
 5 files changed, 88 insertions(+), 57 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java
index 5c156ab4e6166..a79b3c74db006 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java
@@ -36,7 +36,7 @@ public class CreateApiKeyRequestBuilder extends ActionRequestBuilder
-    static ConstructingObjectParser createParser(
+    public static ConstructingObjectParser createParser(
         CheckedBiFunction roleDescriptorParser
     ) {
         ConstructingObjectParser parser = new ConstructingObjectParser<>(
diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java
index 557d601579af8..a072b34da7e96 100644
--- a/x-pack/plugin/security/src/main/java/module-info.java
+++ b/x-pack/plugin/security/src/main/java/module-info.java
@@ -69,6 +69,7 @@
     exports org.elasticsearch.xpack.security.authc to org.elasticsearch.xcontent;
     exports org.elasticsearch.xpack.security.slowlog to org.elasticsearch.server;
     exports org.elasticsearch.xpack.security.authc.support to org.elasticsearch.internal.security;
+    exports org.elasticsearch.xpack.security.rest.action.apikey to org.elasticsearch.internal.security;

     provides
org.elasticsearch.index.SlowLogFieldProvider with org.elasticsearch.xpack.security.slowlog.SecuritySlowLogFieldProvider; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 837c58ab6542d..50c6821a68cab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -578,6 +578,7 @@ public class Security extends Plugin private final SetOnce createApiKeyRequestBuilderFactory = new SetOnce<>(); private final SetOnce updateApiKeyRequestTranslator = new SetOnce<>(); private final SetOnce bulkUpdateApiKeyRequestTranslator = new SetOnce<>(); + private final SetOnce grantApiKeyRequestTranslator = new SetOnce<>(); private final SetOnce getBuiltinPrivilegesResponseTranslator = new SetOnce<>(); private final SetOnce hasPrivilegesRequestBuilderFactory = new SetOnce<>(); private final SetOnce fileRolesStore = new SetOnce<>(); @@ -854,6 +855,9 @@ Collection createComponents( if (bulkUpdateApiKeyRequestTranslator.get() == null) { bulkUpdateApiKeyRequestTranslator.set(new BulkUpdateApiKeyRequestTranslator.Default()); } + if (grantApiKeyRequestTranslator.get() == null) { + grantApiKeyRequestTranslator.set(new RestGrantApiKeyAction.RequestTranslator.Default()); + } if (hasPrivilegesRequestBuilderFactory.get() == null) { hasPrivilegesRequestBuilderFactory.trySet(new HasPrivilegesRequestBuilderFactory.Default()); } @@ -1510,7 +1514,7 @@ public List getRestHandlers( new RestUpdateApiKeyAction(settings, getLicenseState(), updateApiKeyRequestTranslator.get()), new RestBulkUpdateApiKeyAction(settings, getLicenseState(), bulkUpdateApiKeyRequestTranslator.get()), new RestUpdateCrossClusterApiKeyAction(settings, getLicenseState()), - new RestGrantApiKeyAction(settings, getLicenseState()), + new RestGrantApiKeyAction(settings, getLicenseState(), grantApiKeyRequestTranslator.get()), new RestInvalidateApiKeyAction(settings, getLicenseState()), new RestGetApiKeyAction(settings, getLicenseState()), new RestQueryApiKeyAction(settings, getLicenseState()), @@ -2125,6 +2129,7 @@ public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); loadSingletonExtensionAndSetOnce(loader, reservedRoleNameCheckerFactory, ReservedRoleNameChecker.Factory.class); + loadSingletonExtensionAndSetOnce(loader, grantApiKeyRequestTranslator, RestGrantApiKeyAction.RequestTranslator.class); loadSingletonExtensionAndSetOnce(loader, fileRoleValidator, FileRoleValidator.class); loadSingletonExtensionAndSetOnce(loader, secondaryAuthActions, SecondaryAuthActions.class); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java index a3b1ad86a7166..8fda0f0518c93 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -19,13 +20,16 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequestBuilder; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyRequest; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; @@ -42,38 +46,65 @@ */ @ServerlessScope(Scope.INTERNAL) public final class RestGrantApiKeyAction extends ApiKeyBaseRestHandler implements RestRequestFilter { + public interface RequestTranslator { + GrantApiKeyRequest translate(RestRequest request) throws IOException; - static final ObjectParser PARSER = new ObjectParser<>("grant_api_key_request", GrantApiKeyRequest::new); - static { - PARSER.declareString((req, str) -> req.getGrant().setType(str), new ParseField("grant_type")); - PARSER.declareString((req, str) -> req.getGrant().setUsername(str), new ParseField("username")); - PARSER.declareField( - (req, secStr) -> req.getGrant().setPassword(secStr), - SecurityBaseRestHandler::getSecureString, - new ParseField("password"), - ObjectParser.ValueType.STRING - ); - PARSER.declareField( - (req, secStr) -> req.getGrant().setAccessToken(secStr), - SecurityBaseRestHandler::getSecureString, - new ParseField("access_token"), - ObjectParser.ValueType.STRING - ); - PARSER.declareString((req, str) -> req.getGrant().setRunAsUsername(str), new ParseField("run_as")); - PARSER.declareObject( - (req, clientAuthentication) -> req.getGrant().setClientAuthentication(clientAuthentication), - CLIENT_AUTHENTICATION_PARSER, - new ParseField("client_authentication") - ); - PARSER.declareObject( - (req, api) -> req.setApiKeyRequest(api), - (parser, ignore) -> CreateApiKeyRequestBuilder.parse(parser), - new ParseField("api_key") - ); + class Default implements RequestTranslator { + private static final ObjectParser PARSER = createParser((n, p) -> RoleDescriptor.parse(n, p, false)); + + protected static ObjectParser createParser( + CheckedBiFunction roleDescriptorParser + ) { + final ConstructingObjectParser apiKeyParser = CreateApiKeyRequestBuilder.createParser( + roleDescriptorParser + ); + final ObjectParser parser = new ObjectParser<>("grant_api_key_request", GrantApiKeyRequest::new); + parser.declareString((req, str) -> req.getGrant().setType(str), new ParseField("grant_type")); + parser.declareString((req, str) -> req.getGrant().setUsername(str), new ParseField("username")); + parser.declareField( + (req, secStr) -> req.getGrant().setPassword(secStr), + SecurityBaseRestHandler::getSecureString, + new ParseField("password"), + 
ObjectParser.ValueType.STRING + ); + parser.declareField( + (req, secStr) -> req.getGrant().setAccessToken(secStr), + SecurityBaseRestHandler::getSecureString, + new ParseField("access_token"), + ObjectParser.ValueType.STRING + ); + parser.declareString((req, str) -> req.getGrant().setRunAsUsername(str), new ParseField("run_as")); + parser.declareObject( + (req, clientAuthentication) -> req.getGrant().setClientAuthentication(clientAuthentication), + CLIENT_AUTHENTICATION_PARSER, + new ParseField("client_authentication") + ); + parser.declareObject( + GrantApiKeyRequest::setApiKeyRequest, + (p, ignore) -> apiKeyParser.parse(p, null), + new ParseField("api_key") + ); + return parser; + } + + @Override + public GrantApiKeyRequest translate(RestRequest request) throws IOException { + try (XContentParser parser = request.contentParser()) { + return fromXContent(parser); + } + } + + public static GrantApiKeyRequest fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + } } - public RestGrantApiKeyAction(Settings settings, XPackLicenseState licenseState) { + private final RequestTranslator requestTranslator; + + public RestGrantApiKeyAction(Settings settings, XPackLicenseState licenseState, RequestTranslator requestTranslator) { super(settings, licenseState); + this.requestTranslator = requestTranslator; } @Override @@ -86,35 +117,29 @@ public String getName() { return "xpack_security_grant_api_key"; } - public static GrantApiKeyRequest fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { - String refresh = request.param("refresh"); - try (XContentParser parser = request.contentParser()) { - final GrantApiKeyRequest grantRequest = fromXContent(parser); - if (refresh != null) { - grantRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.parse(refresh)); - } else { - grantRequest.setRefreshPolicy(ApiKeyService.defaultCreateDocRefreshPolicy(settings)); - } - return channel -> client.execute( - GrantApiKeyAction.INSTANCE, - grantRequest, - new RestToXContentListener(channel).delegateResponse((listener, ex) -> { - RestStatus status = ExceptionsHelper.status(ex); - if (status == RestStatus.UNAUTHORIZED) { - listener.onFailure( - new ElasticsearchSecurityException("Failed to authenticate api key grant", RestStatus.FORBIDDEN, ex) - ); - } else { - listener.onFailure(ex); - } - }) - ); + final GrantApiKeyRequest grantRequest = requestTranslator.translate(request); + final String refresh = request.param("refresh"); + if (refresh != null) { + grantRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.parse(refresh)); + } else { + grantRequest.setRefreshPolicy(ApiKeyService.defaultCreateDocRefreshPolicy(settings)); } + return channel -> client.execute( + GrantApiKeyAction.INSTANCE, + grantRequest, + new RestToXContentListener(channel).delegateResponse((listener, ex) -> { + RestStatus status = ExceptionsHelper.status(ex); + if (status == RestStatus.UNAUTHORIZED) { + listener.onFailure( + new ElasticsearchSecurityException("Failed to authenticate api key grant", RestStatus.FORBIDDEN, ex) + ); + } else { + listener.onFailure(ex); + } + }) + ); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyActionTests.java
index e6744544a34da..411b752ceacb1 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyActionTests.java
@@ -48,7 +48,7 @@ public void testParseXContentForGrantApiKeyRequest() throws Exception {
                 .endObject()
             )
         ) {
-            GrantApiKeyRequest grantApiKeyRequest = RestGrantApiKeyAction.fromXContent(content);
+            GrantApiKeyRequest grantApiKeyRequest = RestGrantApiKeyAction.RequestTranslator.Default.fromXContent(content);
             assertThat(grantApiKeyRequest.getGrant().getType(), is(grantType));
             assertThat(grantApiKeyRequest.getGrant().getUsername(), is(username));
             assertThat(grantApiKeyRequest.getGrant().getPassword(), is(new SecureString(password.toCharArray())));

From dbd4c1360548027187c0225d64fa8f16590ca1dc Mon Sep 17 00:00:00 2001
From: Ievgen Degtiarenko
Date: Tue, 16 Apr 2024 12:02:37 +0200
Subject: [PATCH 040/130] Ensure executor is closed in case of failure (#107511)

This change ensures that the executor is closed even in case of assertion
failure, to prevent leaking threads.
---
 .../index/shard/IndexShardTests.java          | 44 ++++++++++---------
 1 file changed, 23 insertions(+), 21 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 091673e3eb0cd..c9f8372f54793 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -4011,28 +4011,30 @@ public void testFlushTimeExcludingWaiting() throws Exception {
         int numFlushes = randomIntBetween(2, 5);
         var flushesLatch = new CountDownLatch(numFlushes);
         var executor = Executors.newFixedThreadPool(numFlushes);
-        for (int i = 0; i < numFlushes; i++) {
-            executor.submit(() -> {
-                shard.flush(new FlushRequest().waitIfOngoing(true).force(true));
-                flushesLatch.countDown();
-            });
-        }
-        safeAwait(flushesLatch);
-
-        FlushStats flushStats = shard.flushStats();
-        assertThat(
-            "Flush time excluding waiting should be captured",
-            flushStats.getTotalTimeExcludingWaitingOnLockMillis(),
-            greaterThan(0L)
-        );
-        assertThat(
-            "Flush time excluding waiting should be less than flush time with waiting",
-            flushStats.getTotalTimeExcludingWaitingOnLockMillis(),
-            lessThan(flushStats.getTotalTime().millis())
-        );
+        try {
+            for (int i = 0; i < numFlushes; i++) {
+                executor.submit(() -> {
+                    shard.flush(new FlushRequest().waitIfOngoing(true).force(true));
+                    flushesLatch.countDown();
+                });
+            }
+            safeAwait(flushesLatch);

-        closeShards(shard);
-        executor.shutdown();
+            FlushStats flushStats = shard.flushStats();
+            assertThat(
+                "Flush time excluding waiting should be captured",
+                flushStats.getTotalTimeExcludingWaitingOnLockMillis(),
+                greaterThan(0L)
+            );
+            assertThat(
+                "Flush time excluding waiting should be less than flush time with waiting",
+                flushStats.getTotalTimeExcludingWaitingOnLockMillis(),
+                lessThan(flushStats.getTotalTime().millis())
+            );
+        } finally {
+            closeShards(shard);
+            executor.shutdown();
+        }
     }

     @TestLogging(reason = "testing traces of concurrent flushes", value = "org.elasticsearch.index.engine.Engine:TRACE")

From 9626615fc3cd471f9143edd3b90c92827ddcec6c Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 16 Apr 2024 11:53:08 +0100
Subject:
[PATCH 041/130] Improve failure message for InternalTestCluster#getInstance (#107398) Today if `InternalTestCluster#getInstance` fails to find a matching instance it throws an opaque `AssertionError`. This commit adds a message describing the problem to make troubleshooting easier. Relates #107392 --- .../test/InternalTestCluster.java | 56 +++++++++++++------ 1 file changed, 39 insertions(+), 17 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 949679280a53b..53601caa8a1d2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -176,13 +176,29 @@ public final class InternalTestCluster extends TestCluster { private static final Logger logger = LogManager.getLogger(InternalTestCluster.class); - private static final Predicate DATA_NODE_PREDICATE = nodeAndClient -> DiscoveryNode.canContainData( - nodeAndClient.node.settings() - ); + private static final Predicate DATA_NODE_PREDICATE = new Predicate<>() { + @Override + public boolean test(NodeAndClient nodeAndClient) { + return DiscoveryNode.canContainData(nodeAndClient.node.settings()); + } + + @Override + public String toString() { + return "any data node"; + } + }; - private static final Predicate MASTER_NODE_PREDICATE = nodeAndClient -> DiscoveryNode.isMasterNode( - nodeAndClient.node.settings() - ); + private static final Predicate MASTER_NODE_PREDICATE = new Predicate<>() { + @Override + public boolean test(NodeAndClient nodeAndClient) { + return DiscoveryNode.isMasterNode(nodeAndClient.node.settings()); + } + + @Override + public String toString() { + return "any master-eligible node"; + } + }; private static final Predicate NO_DATA_NO_MASTER_PREDICATE = DATA_NODE_PREDICATE.negate() .and(MASTER_NODE_PREDICATE.negate()); @@ -1589,15 +1605,25 @@ private Iterable getInstances(Class clazz, Predicate pr /** * @return the instance of the given class from the node with provided {@code nodeName} */ - public T getInstance(Class clazz, final String nodeName) { - return getInstance(clazz, nc -> nodeName == null || nodeName.equals(nc.name)); + public T getInstance(Class clazz, @Nullable final String nodeName) { + return getInstance(clazz, nodeName == null ? 
Predicates.always() : new NodeNamePredicate(nodeName));
     }

     /**
      * @return the instance of the given class from a random node with provided {@code role}
      */
     public T getInstance(Class clazz, DiscoveryNodeRole role) {
-        return getInstance(clazz, nc -> DiscoveryNode.getRolesFromSettings(nc.node.settings()).contains(role));
+        return getInstance(clazz, new Predicate<>() {
+            @Override
+            public boolean test(NodeAndClient nc) {
+                return DiscoveryNode.getRolesFromSettings(nc.node.settings()).contains(role);
+            }
+
+            @Override
+            public String toString() {
+                return "role: " + role;
+            }
+        });
     }

     public T getDataNodeInstance(Class clazz) {
@@ -1614,7 +1640,9 @@ public T getAnyMasterNodeInstance(Class clazz) {

     private synchronized T getInstance(Class clazz, Predicate predicate) {
         NodeAndClient randomNodeAndClient = getRandomNodeAndClient(predicate);
-        assert randomNodeAndClient != null;
+        if (randomNodeAndClient == null) {
+            throw new AssertionError("no node matches [" + predicate + "]");
+        }
         return getInstanceFromNode(clazz, randomNodeAndClient.node);
     }

@@ -2296,13 +2324,7 @@ private static Collection filterNodes(
         return map.values().stream().filter(predicate).collect(Collectors.toCollection(ArrayList::new));
     }

-    private static final class NodeNamePredicate implements Predicate {
-        private final String nodeName;
-
-        NodeNamePredicate(String nodeName) {
-            this.nodeName = nodeName;
-        }
-
+    private record NodeNamePredicate(String nodeName) implements Predicate {
         @Override
         public boolean test(NodeAndClient nodeAndClient) {
             return nodeName.equals(nodeAndClient.getName());

From a2c2e8fe4799f34fe4bf98c32ab7fa7bdb4838d4 Mon Sep 17 00:00:00 2001
From: Bogdan Pintea
Date: Tue, 16 Apr 2024 12:57:18 +0200
Subject: [PATCH 042/130] ESQL: extend BUCKET with spans. Turn it into a
 grouping function (#107272)

This extends the `BUCKET` function to accept a two-parameter-only
invocation: the first parameter remains as is, while the second is a
span. The span can be a numeric (floating point) span if the first
argument is numeric, or a date period or time duration if the first
argument is a date.

Also, the function can now be invoked with the alias BIN.

Additionally, the function has been turned into a grouping-only
function and thus can only be used within a `STATS` command.
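For illustration, a minimal sketch of the two new invocation forms, lifted
from the csv-spec tests this patch adds (`employees` and `sample_data` are
the standard ES|QL test datasets):

// numeric span: fixed 5000.0-wide salary buckets
FROM employees
| STATS c = COUNT(1) BY b = BUCKET(salary, 5000.)

// time duration span, using the BIN alias
FROM sample_data
| STATS c = COUNT(1) BY b = BIN(@timestamp, 30 minutes)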
--- docs/changelog/107272.yaml | 5 + docs/reference/esql/esql-get-started.asciidoc | 6 +- docs/reference/esql/functions/bucket.asciidoc | 22 +- .../functions/description/bucket.asciidoc | 2 +- .../functions/kibana/definition/bucket.json | 198 ++++++-- .../esql/functions/kibana/docs/bucket.md | 4 +- .../src/main/resources/bucket.csv-spec | 475 ++++++++++++++++++ .../src/main/resources/date.csv-spec | 276 ---------- .../src/main/resources/floats.csv-spec | 22 - .../src/main/resources/ints.csv-spec | 50 -- .../src/main/resources/meta.csv-spec | 16 +- .../src/main/resources/unsigned_long.csv-spec | 14 - .../xpack/esql/analysis/Verifier.java | 45 +- .../function/EsqlFunctionRegistry.java | 5 +- .../{scalar/math => grouping}/Bucket.java | 144 ++++-- .../function/grouping/GroupingFunction.java | 33 ++ .../xpack/esql/io/stream/PlanNamedTypes.java | 18 +- .../xpack/esql/analysis/VerifierTests.java | 68 +++ .../function/AbstractFunctionTestCase.java | 10 +- .../function/scalar/math/BucketTests.java | 77 ++- .../optimizer/LogicalPlanOptimizerTests.java | 8 +- .../xpack/ql/expression/Expression.java | 5 + 22 files changed, 995 insertions(+), 508 deletions(-) create mode 100644 docs/changelog/107272.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/{scalar/math => grouping}/Bucket.java (59%) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java diff --git a/docs/changelog/107272.yaml b/docs/changelog/107272.yaml new file mode 100644 index 0000000000000..eb9e0c5e8bab8 --- /dev/null +++ b/docs/changelog/107272.yaml @@ -0,0 +1,5 @@ +pr: 107272 +summary: "ESQL: extend BUCKET with spans" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 421272f741602..351a8efdc8ae9 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -248,7 +248,7 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-bucket] +include::{esql-specs}/bucket.csv-spec[tag=gs-bucket] ---- Combine `BUCKET` with <> to create a histogram. 
For example, @@ -256,14 +256,14 @@ to count the number of events per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by] +include::{esql-specs}/bucket.csv-spec[tag=gs-bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -include::{esql-specs}/date.csv-spec[tag=gs-bucket-stats-by-median] +include::{esql-specs}/bucket.csv-spec[tag=gs-bucket-stats-by-median] ---- [discrete] diff --git a/docs/reference/esql/functions/bucket.asciidoc b/docs/reference/esql/functions/bucket.asciidoc index e436a79d0ec1e..114d28de9087d 100644 --- a/docs/reference/esql/functions/bucket.asciidoc +++ b/docs/reference/esql/functions/bucket.asciidoc @@ -35,11 +35,11 @@ in monthly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsBucketMonth] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketMonth] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsBucketMonth-result] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketMonth-result] |=== The goal isn't to provide *exactly* the target number of buckets, it's to pick a @@ -51,11 +51,11 @@ Combine `BUCKET` with [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketMonthlyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsBucketMonthlyHistogram-result] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketMonthlyHistogram-result] |=== NOTE: `BUCKET` does not create buckets that don't match any documents. @@ -66,11 +66,11 @@ at most 100 buckets in a year results in weekly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=docsBucketWeeklyHistogram-result] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogram-result] |=== NOTE: `BUCKET` does not filter any rows. 
It only uses the provided range to @@ -83,11 +83,11 @@ salary histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketNumeric] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/ints.csv-spec[tag=docsBucketNumeric-result] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketNumeric-result] |=== Unlike the earlier example that intentionally filters on a date range, you @@ -102,7 +102,7 @@ per hour: [source.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=docsBucketLast24hr] +include::{esql-specs}/bucket.csv-spec[tag=docsBucketLast24hr] ---- Create monthly buckets for the year 1985, and calculate the average salary by @@ -110,9 +110,9 @@ hiring month: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=bucket_in_agg] +include::{esql-specs}/bucket.csv-spec[tag=bucket_in_agg] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=bucket_in_agg-result] +include::{esql-specs}/bucket.csv-spec[tag=bucket_in_agg-result] |=== diff --git a/docs/reference/esql/functions/description/bucket.asciidoc b/docs/reference/esql/functions/description/bucket.asciidoc index 0c1d9d3ea1ffd..cc92ae4cd21e8 100644 --- a/docs/reference/esql/functions/description/bucket.asciidoc +++ b/docs/reference/esql/functions/description/bucket.asciidoc @@ -2,4 +2,4 @@ *Description* -Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. +Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. diff --git a/docs/reference/esql/functions/kibana/definition/bucket.json b/docs/reference/esql/functions/kibana/definition/bucket.json index 050c334ac7e6e..2c0b6dfbf38c3 100644 --- a/docs/reference/esql/functions/kibana/definition/bucket.json +++ b/docs/reference/esql/functions/kibana/definition/bucket.json @@ -2,8 +2,26 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "bucket", - "description" : "Creates human-friendly buckets and returns a datetime value\nfor each row that corresponds to the resulting bucket the row falls into.", + "description" : "Creates groups of values - buckets - out of a datetime or numeric input. 
The size of the buckets can either\nbe provided directly, or chosen based on a recommended count and values range.", "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "date_period", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, { "params" : [ { @@ -21,12 +39,30 @@ { "name" : "from", "type" : "datetime", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "datetime", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "time_duration", "optional" : false, "description" : "" } @@ -34,6 +70,24 @@ "variadic" : false, "returnType" : "datetime" }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "double", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, { "params" : [ { @@ -51,13 +105,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -81,13 +135,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -111,13 +165,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -141,13 +195,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -171,13 +225,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -201,13 +255,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -231,13 +285,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -261,13 +315,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -291,12 +345,30 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "double", "optional" : false, "description" : "" } @@ -321,13 +393,13 @@ 
{ "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -351,13 +423,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -381,13 +453,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -411,13 +483,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -441,13 +513,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -471,13 +543,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -501,13 +573,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -531,13 +603,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -561,12 +633,30 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", + "optional" : true, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "" + }, + { + "name" : "buckets", + "type" : "double", "optional" : false, "description" : "" } @@ -591,13 +681,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -621,13 +711,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -651,13 +741,13 @@ { "name" : "from", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -681,13 +771,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -711,13 +801,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -741,13 +831,13 @@ { "name" : "from", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : 
"long", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -771,13 +861,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "double", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -801,13 +891,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "integer", - "optional" : false, + "optional" : true, "description" : "" } ], @@ -831,13 +921,13 @@ { "name" : "from", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" }, { "name" : "to", "type" : "long", - "optional" : false, + "optional" : true, "description" : "" } ], diff --git a/docs/reference/esql/functions/kibana/docs/bucket.md b/docs/reference/esql/functions/kibana/docs/bucket.md index 6ebfe7de5527d..210c10a4ebe28 100644 --- a/docs/reference/esql/functions/kibana/docs/bucket.md +++ b/docs/reference/esql/functions/kibana/docs/bucket.md @@ -3,6 +3,6 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### BUCKET -Creates human-friendly buckets and returns a datetime value -for each row that corresponds to the resulting bucket the row falls into. +Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either +be provided directly, or chosen based on a recommended count and values range. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec new file mode 100644 index 0000000000000..d2dd798a45a7d --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -0,0 +1,475 @@ +// BUCKET-specific tests + +// +// Date bucketing +// + +bucketSimpleMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_month[] +ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") +| STATS date=VALUES(date) BY bucket=BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_month[] +; + +// tag::bucket_month-result[] + date:datetime | bucket:datetime +1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z +// end::bucket_month-result[] +; + +bucketSimpleWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_week[] +ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") +| STATS date=VALUES(date) BY bucket=BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +// end::bucket_week[] +; + +// tag::bucket_week-result[] + date:datetime | bucket:datetime +1985-07-09T00:00:00.000Z | 1985-07-08T00:00:00.000Z +// end::bucket_week-result[] +; + +bucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hd = MV_SORT(VALUES(hire_date)) BY b = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT hd +; + + hd:date | b:date +[1985-02-18T00:00:00.000Z, 1985-02-24T00:00:00.000Z] |1985-02-01T00:00:00.000Z +1985-05-13T00:00:00.000Z |1985-05-01T00:00:00.000Z +1985-07-09T00:00:00.000Z |1985-07-01T00:00:00.000Z +1985-09-17T00:00:00.000Z |1985-09-01T00:00:00.000Z +[1985-10-14T00:00:00.000Z, 1985-10-20T00:00:00.000Z] |1985-10-01T00:00:00.000Z +[1985-11-19T00:00:00.000Z, 1985-11-20T00:00:00.000Z, 1985-11-21T00:00:00.000Z]|1985-11-01T00:00:00.000Z +; + +bucketWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +from employees +| where hire_date >= "1985-01-01T00:00:00Z" and 
hire_date < "1986-01-01T00:00:00Z" +| stats hire_date = mv_sort(values(hire_date)) by hd = bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| sort hire_date +; + +hire_date:date | hd:date +[1985-02-18T00:00:00.000Z, 1985-02-24T00:00:00.000Z] |1985-02-18T00:00:00.000Z +1985-05-13T00:00:00.000Z |1985-05-13T00:00:00.000Z +1985-07-09T00:00:00.000Z |1985-07-08T00:00:00.000Z +1985-09-17T00:00:00.000Z |1985-09-16T00:00:00.000Z +[1985-10-14T00:00:00.000Z, 1985-10-20T00:00:00.000Z] |1985-10-14T00:00:00.000Z +[1985-11-19T00:00:00.000Z, 1985-11-20T00:00:00.000Z, 1985-11-21T00:00:00.000Z]|1985-11-18T00:00:00.000Z +; + +bucketYearInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| STATS COUNT(*) by bucket = BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) +| sort bucket; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +bucketYearInAggConstRefsString#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = "1999-01-01T00:00:00Z" +| EVAL bucket_end = NOW() +| STATS COUNT(*) BY bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) +| SORT bucket +; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +bucketYearInAggConstRefsConcat#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") +| EVAL bucket_end = NOW() +| STATS COUNT(*) by bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) +| sort bucket +; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +bucketYearInAggConstRefsDate#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") +| EVAL bucket_end = NOW() +| STATS COUNT(*) BY bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) +| SORT bucket +; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +bucketYearInAggConstRefsRename#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = "1999-01-01T00:00:00Z" +| EVAL bucket_end = NOW() +| RENAME bucket_end as be, bucket_start as bs +| STATS c = COUNT(*) by BUCKET(hire_date, 5, bs, be) +| SORT c +; + +c:long | BUCKET(hire_date, 5, bs, be):date +1 | 1999-01-01T00:00:00.000Z +; + +bucketMonthInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::bucket_in_agg[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS AVG(salary) BY bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT bucket +// end::bucket_in_agg[] +; + +// tag::bucket_in_agg-result[] +AVG(salary):double | bucket:date + 46305.0 | 1985-02-01T00:00:00.000Z + 44817.0 | 1985-05-01T00:00:00.000Z + 62405.0 | 1985-07-01T00:00:00.000Z + 49095.0 | 1985-09-01T00:00:00.000Z + 51532.0 | 1985-10-01T00:00:00.000Z + 54539.75 | 1985-11-01T00:00:00.000Z +// end::bucket_in_agg-result[] +; + +docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonth[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hire_date = MV_SORT(VALUES(hire_date)) BY month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT hire_date +//end::docsBucketMonth[] +; + +//tag::docsBucketMonth-result[] + 
hire_date:date | month:date +[1985-02-18T00:00:00.000Z, 1985-02-24T00:00:00.000Z] |1985-02-01T00:00:00.000Z +1985-05-13T00:00:00.000Z |1985-05-01T00:00:00.000Z +1985-07-09T00:00:00.000Z |1985-07-01T00:00:00.000Z +1985-09-17T00:00:00.000Z |1985-09-01T00:00:00.000Z +[1985-10-14T00:00:00.000Z, 1985-10-20T00:00:00.000Z] |1985-10-01T00:00:00.000Z +[1985-11-19T00:00:00.000Z, 1985-11-20T00:00:00.000Z, 1985-11-21T00:00:00.000Z]|1985-11-01T00:00:00.000Z +//end::docsBucketMonth-result[] +; + +docsBucketMonthlyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketMonthlyHistogram[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hires_per_month = COUNT(*) BY month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT month +//end::docsBucketMonthlyHistogram[] +; + +//tag::docsBucketMonthlyHistogram-result[] + hires_per_month:long | month:date +2 |1985-02-01T00:00:00.000Z +1 |1985-05-01T00:00:00.000Z +1 |1985-07-01T00:00:00.000Z +1 |1985-09-01T00:00:00.000Z +2 |1985-10-01T00:00:00.000Z +4 |1985-11-01T00:00:00.000Z +//end::docsBucketMonthlyHistogram-result[] +; + +docsBucketWeeklyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketWeeklyHistogram[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT week +//end::docsBucketWeeklyHistogram[] +; + +//tag::docsBucketWeeklyHistogram-result[] + hires_per_week:long | week:date +2 |1985-02-18T00:00:00.000Z +1 |1985-05-13T00:00:00.000Z +1 |1985-07-08T00:00:00.000Z +1 |1985-09-16T00:00:00.000Z +2 |1985-10-14T00:00:00.000Z +4 |1985-11-18T00:00:00.000Z +//end::docsBucketWeeklyHistogram-result[] +; + +docsBucketLast24hr#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketLast24hr[] +FROM sample_data +| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() +| STATS COUNT(*) BY bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) +//end::docsBucketLast24hr[] +; + + COUNT(*):long | bucket:date +; + +docsGettingStartedBucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket[] +FROM sample_data +| STATS BY bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) +// end::gs-bucket[] +| LIMIT 0 +; + +bucket:date +; + +docsGettingStartedBucketStatsBy#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by[] +FROM sample_data +| STATS c = COUNT(*) BY bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +// end::gs-bucket-stats-by[] +| SORT bucket +; + + c:long | bucket:date +2 |2023-10-23T12:00:00.000Z +5 |2023-10-23T13:00:00.000Z +; + +docsGettingStartedBucketStatsByMedian#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +// tag::gs-bucket-stats-by-median[] +FROM sample_data +| KEEP @timestamp, event_duration +| STATS median_duration = MEDIAN(event_duration) BY bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +// end::gs-bucket-stats-by-median[] +| SORT bucket +; + +median_duration:double | bucket:date +3107561.0 |2023-10-23T12:00:00.000Z +1756467.0 |2023-10-23T13:00:00.000Z +; + +bucketByTimeDuration#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM sample_data +| STATS min = MAX(@timestamp), max = MAX(@timestamp) BY bucket = BUCKET(@timestamp, 30 minutes) +| SORT min +; + + min:date | max:date | bucket:date 
+2023-10-23T12:27:28.948Z|2023-10-23T12:27:28.948Z|2023-10-23T12:00:00.000Z +2023-10-23T13:55:01.543Z|2023-10-23T13:55:01.543Z|2023-10-23T13:30:00.000Z + +; + +aggByTimeDuratinBucket#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM sample_data +| STATS c = COUNT(1) BY b = BUCKET(@timestamp, 30 minutes) +| SORT c +; + + c:long | b:date +2 |2023-10-23T12:00:00.000Z +5 |2023-10-23T13:30:00.000Z +; + +bucketByDatePeriod#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM sample_data +| EVAL adjusted = CASE(TO_LONG(@timestamp) % 2 == 0, @timestamp + 1 month, @timestamp + 2 years) +| STATS c = COUNT(*) BY b = BUCKET(adjusted, 1 month) +| SORT c +; + + c:long | b:date +3 |2025-10-01T00:00:00.000Z +4 |2023-11-01T00:00:00.000Z + +; + +aggByDatePeriodBucket#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM sample_data +| EVAL adjusted = CASE(TO_LONG(@timestamp) % 2 == 0, @timestamp + 1 month, @timestamp + 2 years) +| STATS c = COUNT(1) BY b = BUCKET(adjusted, 1 month) +| SORT c DESC +; + + c:long | b:date +4 |2023-11-01T00:00:00.000Z +3 |2025-10-01T00:00:00.000Z +; + +// +// Numeric bucketing +// + +bucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS c = COUNT(*) BY b = BUCKET(salary, 20, 25324, 74999) +| SORT b +; + + c:long | b:double +1 |25000.0 +1 |30000.0 +1 |40000.0 +2 |45000.0 +2 |50000.0 +1 |55000.0 +1 |60000.0 +1 |65000.0 +1 |70000.0 +; + +docsBucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +//tag::docsBucketNumeric[] +FROM employees +| STATS COUNT(*) by bs = BUCKET(salary, 20, 25324, 74999) +| SORT bs +//end::docsBucketNumeric[] +; + +//tag::docsBucketNumeric-result[] + COUNT(*):long | bs:double +9 |25000.0 +9 |30000.0 +18 |35000.0 +11 |40000.0 +11 |45000.0 +10 |50000.0 +7 |55000.0 +9 |60000.0 +8 |65000.0 +8 |70000.0 +//end::docsBucketNumeric-result[] +; + +// bucketing in span mode (identical results to above) +bucketNumericWithSpan#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS c = COUNT(1) BY b = BUCKET(salary, 5000.) 
+| SORT b +; + + c:long | b:double +1 |25000.0 +1 |30000.0 +1 |40000.0 +2 |45000.0 +2 |50000.0 +1 |55000.0 +1 |60000.0 +1 |65000.0 +1 |70000.0 +; + +bucketNumericMixedTypes#[skip:-8.13.99, reason:BUCKET extended in 8.14] +ROW long = TO_LONG(100), double = 99., int = 100 +| STATS BY b1 = BUCKET(long, 99.), b2 = BUCKET(double, 100.), b3 = BUCKET(int, 49.5) +; + + b1:double| b2:double| b3:double +99.0 |0.0 |99.0 +; + +bucketWithFloats#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hire_date = MV_SORT(VALUES(hire_date)) BY bh = ROUND(BUCKET(height, 20, 1.41, 2.10), 2) +| SORT hire_date, bh +; + + hire_date:date | bh:double +1985-02-18T00:00:00.000Z |1.85 +[1985-02-24T00:00:00.000Z, 1985-05-13T00:00:00.000Z]|2.0 +[1985-07-09T00:00:00.000Z, 1985-11-19T00:00:00.000Z]|1.8 +1985-09-17T00:00:00.000Z |1.4 +1985-10-14T00:00:00.000Z |1.75 +[1985-10-20T00:00:00.000Z, 1985-11-20T00:00:00.000Z]|1.9 +1985-11-20T00:00:00.000Z |1.95 +1985-11-21T00:00:00.000Z |2.05 +; + +bucketWithUnsignedLong#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM ul_logs +| WHERE @timestamp >= "2017-11-10T20:30:00Z" AND @timestamp < "2017-11-10T20:35:00Z" +| STATS ts = VALUES(@timestamp) BY bh = bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) +| SORT ts +; + + ts:date | bh:double +2017-11-10T20:32:57.000Z|8.0E18 +2017-11-10T20:33:06.000Z|5.0E18 +2017-11-10T20:34:43.000Z|1.75E19 +; + +bucketMultipleAndExpressions#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS sumK = (b1k + b2k) / 1000 BY b1k = BUCKET(salary, 1000.), b2k = BUCKET(salary, 2000.) +| SORT sumK +| LIMIT 4 +; + + sumK:double | b1k:double | b2k:double +49.0 |25000.0 |24000.0 +52.0 |26000.0 |26000.0 +53.0 |27000.0 |26000.0 +56.0 |28000.0 |28000.0 +; + +// +// BIN copies +// + +docsGettingStartedBin#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM sample_data +| STATS BY bin = BIN(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) +| LIMIT 0 +; + +bin:date +; + +aggByTimeDuratinBin#[skip:-8.13.99, reason:BUCKET extended in 8.14] +FROM sample_data +| STATS c = COUNT(1) BY b = BIN(@timestamp, 30 minutes) +| SORT c +; + + c:long | b:date +2 |2023-10-23T12:00:00.000Z +5 |2023-10-23T13:30:00.000Z +; + +binNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS COUNT(*) by bin = BIN(salary, 20, 25324, 74999) +| SORT bin +; + + COUNT(*):long | bin:double +9 |25000.0 +9 |30000.0 +18 |35000.0 +11 |40000.0 +11 |45000.0 +10 |50000.0 +7 |55000.0 +9 |60000.0 +8 |65000.0 +8 |70000.0 +; + +binNumericMixedTypes#[skip:-8.13.99, reason:BUCKET extended in 8.14] +ROW long = TO_LONG(100), double = 99., int = 100 +| STATS BY b1 = BIN(long, 99.), b2 = BIN(double, 100.), b3 = BIN(int, 49.5) +; + + b1:double| b2:double| b3:double +99.0 |0.0 |99.0 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 57cda24d15fa2..73b784663de8c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -261,74 +261,6 @@ int:integer |dt:date // end::to_datetime-int-result[] ; -bucketSimpleMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::bucket_month[] -ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=BUCKET(date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::bucket_month[] -; - -// 
tag::bucket_month-result[] - date:datetime | bucket:datetime -1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z -// end::bucket_month-result[] -; - -bucketSimpleWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::bucket_week[] -ROW date=TO_DATETIME("1985-07-09T00:00:00.000Z") -| EVAL bucket=BUCKET(date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -// end::bucket_week[] -; - -// tag::bucket_week-result[] - date:datetime | bucket:datetime -1985-07-09T00:00:00.000Z | 1985-07-08T00:00:00.000Z -// end::bucket_week-result[] -; - -bucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -from employees -| where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| sort hire_date -| keep hire_date, hd; - -hire_date:date | hd:date -1985-02-18T00:00:00.000Z | 1985-02-01T00:00:00.000Z -1985-02-24T00:00:00.000Z | 1985-02-01T00:00:00.000Z -1985-05-13T00:00:00.000Z | 1985-05-01T00:00:00.000Z -1985-07-09T00:00:00.000Z | 1985-07-01T00:00:00.000Z -1985-09-17T00:00:00.000Z | 1985-09-01T00:00:00.000Z -1985-10-14T00:00:00.000Z | 1985-10-01T00:00:00.000Z -1985-10-20T00:00:00.000Z | 1985-10-01T00:00:00.000Z -1985-11-19T00:00:00.000Z | 1985-11-01T00:00:00.000Z -1985-11-20T00:00:00.000Z | 1985-11-01T00:00:00.000Z -1985-11-20T00:00:00.000Z | 1985-11-01T00:00:00.000Z -1985-11-21T00:00:00.000Z | 1985-11-01T00:00:00.000Z -; - -bucketWeek#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -from employees -| where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z" -| eval hd = bucket(hire_date, 55, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| sort hire_date -| keep hire_date, hd; - -hire_date:date | hd:date -1985-02-18T00:00:00.000Z | 1985-02-18T00:00:00.000Z -1985-02-24T00:00:00.000Z | 1985-02-18T00:00:00.000Z -1985-05-13T00:00:00.000Z | 1985-05-13T00:00:00.000Z -1985-07-09T00:00:00.000Z | 1985-07-08T00:00:00.000Z -1985-09-17T00:00:00.000Z | 1985-09-16T00:00:00.000Z -1985-10-14T00:00:00.000Z | 1985-10-14T00:00:00.000Z -1985-10-20T00:00:00.000Z | 1985-10-14T00:00:00.000Z -1985-11-19T00:00:00.000Z | 1985-11-18T00:00:00.000Z -1985-11-20T00:00:00.000Z | 1985-11-18T00:00:00.000Z -1985-11-20T00:00:00.000Z | 1985-11-18T00:00:00.000Z -1985-11-21T00:00:00.000Z | 1985-11-18T00:00:00.000Z -; - now row a = now() | eval x = a == now(), y = substring(date_format("yyyy", a), 0, 2) | keep x, y; @@ -350,92 +282,6 @@ from employees | where birth_date > now() | sort emp_no asc | keep emp_no, birth emp_no:integer | birth_date:date ; -bucketYearInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket = BUCKET(hire_date, 5, "1999-01-01T00:00:00Z", NOW()) -| STATS COUNT(*) by bucket -| sort bucket; - -COUNT(*):long | bucket:date -1 | 1999-01-01T00:00:00.000Z -; - -bucketYearInAggConstRefsString#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket_start = "1999-01-01T00:00:00Z" -| EVAL bucket_end = NOW() -| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) -| STATS COUNT(*) by bucket -| sort bucket; - -COUNT(*):long | bucket:date -1 | 1999-01-01T00:00:00.000Z -; - -bucketYearInAggConstRefsConcat#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") -| EVAL bucket_end = NOW() -| EVAL bucket = BUCKET(hire_date, 5, bucket_start, 
bucket_end) -| STATS COUNT(*) by bucket -| sort bucket; - -COUNT(*):long | bucket:date -1 | 1999-01-01T00:00:00.000Z -; - -bucketYearInAggConstRefsDate#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") -| EVAL bucket_end = NOW() -| EVAL bucket = BUCKET(hire_date, 5, bucket_start, bucket_end) -| keep bucket_start, bucket_end, bucket -| STATS COUNT(*) by bucket -| sort bucket; - -COUNT(*):long | bucket:date -1 | 1999-01-01T00:00:00.000Z -; - -bucketYearInAggConstRefsRename#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1999-01-01T00:00:00Z" -| EVAL bucket_start = "1999-01-01T00:00:00Z" -| EVAL bucket_end = NOW() -| RENAME bucket_end as be, bucket_start as bs -| STATS c = COUNT(*) by BUCKET(hire_date, 5, bs, be) -| SORT c -; - -c:long | BUCKET(hire_date, 5, bs, be):date -1 | 1999-01-01T00:00:00.000Z -; - -bucketMonthInAgg#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::bucket_in_agg[] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| STATS AVG(salary) BY bucket -| SORT bucket -// end::bucket_in_agg[] -; - -// tag::bucket_in_agg-result[] -AVG(salary):double | bucket:date - 46305.0 | 1985-02-01T00:00:00.000Z - 44817.0 | 1985-05-01T00:00:00.000Z - 62405.0 | 1985-07-01T00:00:00.000Z - 49095.0 | 1985-09-01T00:00:00.000Z - 51532.0 | 1985-10-01T00:00:00.000Z - 54539.75 | 1985-11-01T00:00:00.000Z -// end::bucket_in_agg-result[] -; - evalDateDiffInNanoAndMicroAndMilliSeconds#[skip:-8.12.99, reason:date_diff added in 8.13] ROW date1=to_datetime("2023-12-02T11:00:00.000Z"), date2=to_datetime("2023-12-02T11:00:00.001Z") | EVAL dd_ns1=date_diff("nanoseconds", date1, date2), dd_ns2=date_diff("ns", date1, date2) @@ -961,128 +807,6 @@ birth_date:datetime 1953-04-21T00:00:00.000Z ; -docsBucketMonth#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -//tag::docsBucketMonth[] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| KEEP hire_date, month -| SORT hire_date -//end::docsBucketMonth[] -; - -//tag::docsBucketMonth-result[] - hire_date:date | month:date -1985-02-18T00:00:00.000Z|1985-02-01T00:00:00.000Z -1985-02-24T00:00:00.000Z|1985-02-01T00:00:00.000Z -1985-05-13T00:00:00.000Z|1985-05-01T00:00:00.000Z -1985-07-09T00:00:00.000Z|1985-07-01T00:00:00.000Z -1985-09-17T00:00:00.000Z|1985-09-01T00:00:00.000Z -1985-10-14T00:00:00.000Z|1985-10-01T00:00:00.000Z -1985-10-20T00:00:00.000Z|1985-10-01T00:00:00.000Z -1985-11-19T00:00:00.000Z|1985-11-01T00:00:00.000Z -1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z -1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z -1985-11-21T00:00:00.000Z|1985-11-01T00:00:00.000Z -//end::docsBucketMonth-result[] -; - -docsBucketMonthlyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -//tag::docsBucketMonthlyHistogram[] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| STATS hires_per_month = COUNT(*) BY month -| SORT month -//end::docsBucketMonthlyHistogram[] -; - -//tag::docsBucketMonthlyHistogram-result[] - hires_per_month:long | month:date -2 |1985-02-01T00:00:00.000Z -1 |1985-05-01T00:00:00.000Z -1 
|1985-07-01T00:00:00.000Z -1 |1985-09-01T00:00:00.000Z -2 |1985-10-01T00:00:00.000Z -4 |1985-11-01T00:00:00.000Z -//end::docsBucketMonthlyHistogram-result[] -; - -docsBucketWeeklyHistogram#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -//tag::docsBucketWeeklyHistogram[] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL week = BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") -| STATS hires_per_week = COUNT(*) BY week -| SORT week -//end::docsBucketWeeklyHistogram[] -; - -//tag::docsBucketWeeklyHistogram-result[] - hires_per_week:long | week:date -2 |1985-02-18T00:00:00.000Z -1 |1985-05-13T00:00:00.000Z -1 |1985-07-08T00:00:00.000Z -1 |1985-09-16T00:00:00.000Z -2 |1985-10-14T00:00:00.000Z -4 |1985-11-18T00:00:00.000Z -//end::docsBucketWeeklyHistogram-result[] -; - -docsBucketLast24hr#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -//tag::docsBucketLast24hr[] -FROM sample_data -| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() -| EVAL bucket = BUCKET(@timestamp, 25, NOW() - 1 day, NOW()) -| STATS COUNT(*) BY bucket -//end::docsBucketLast24hr[] -; - - COUNT(*):long | bucket:date -; - -docsGettingStartedBucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::gs-bucket[] -FROM sample_data -| KEEP @timestamp -| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", NOW()) -// end::gs-bucket[] -| LIMIT 0 -; - -@timestamp:date | bucket:date -; - -docsGettingStartedBucketStatsBy#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::gs-bucket-stats-by[] -FROM sample_data -| KEEP @timestamp, event_duration -| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS COUNT(*) BY bucket -// end::gs-bucket-stats-by[] -| SORT bucket -; - -COUNT(*):long | bucket:date -2 |2023-10-23T12:00:00.000Z -5 |2023-10-23T13:00:00.000Z -; - -docsGettingStartedBucketStatsByMedian#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::gs-bucket-stats-by-median[] -FROM sample_data -| KEEP @timestamp, event_duration -| EVAL bucket = BUCKET(@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS median_duration = MEDIAN(event_duration) BY bucket -// end::gs-bucket-stats-by-median[] -| SORT bucket -; - -median_duration:double | bucket:date -3107561.0 |2023-10-23T12:00:00.000Z -1756467.0 |2023-10-23T13:00:00.000Z -; - dateExtract // tag::dateExtract[] ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 8f8f218fd9821..8af770c521243 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -257,28 +257,6 @@ emp_no:integer | salary_change:double | a1:double 10005 | [-2.14,13.07] | [-2.14,13.07] ; -bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bh = bucket(height, 20, 1.41, 2.10) -| SORT hire_date, height -| KEEP hire_date, height, bh -; - -hire_date:date | height:double | bh:double -1985-02-18T00:00:00.000Z | 1.85 | 1.85 -1985-02-24T00:00:00.000Z | 2.0 | 2.0 -1985-05-13T00:00:00.000Z | 2.0 | 2.0 -1985-07-09T00:00:00.000Z | 1.83 | 1.8 -1985-09-17T00:00:00.000Z | 1.45 | 1.4000000000000001 -1985-10-14T00:00:00.000Z | 1.77 | 1.75 -1985-10-20T00:00:00.000Z | 1.94 | 1.9000000000000001 
-1985-11-19T00:00:00.000Z | 1.8 | 1.8 -1985-11-20T00:00:00.000Z | 1.93 | 1.9000000000000001 -1985-11-20T00:00:00.000Z | 1.99 | 1.9500000000000002 -1985-11-21T00:00:00.000Z | 2.08 | 2.0500000000000003 -; - cos // tag::cos[] ROW a=1.8 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index f9cec120fa763..59b7c9e600c40 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -642,56 +642,6 @@ emp_no:integer | salary_change.long:long | a1:long 10005 | [-2, 13] | [-2, 13] ; -bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -// tag::bucket[] -FROM employees -| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" -| EVAL bs = BUCKET(salary, 20, 25324, 74999) -| SORT hire_date, salary -| KEEP hire_date, salary, bs -// end::bucket[] -; - -// tag::bucket-result[] -hire_date:date | salary:integer | bs:double -1985-02-18T00:00:00.000Z | 66174 | 65000.0 -1985-02-24T00:00:00.000Z | 26436 | 25000.0 -1985-05-13T00:00:00.000Z | 44817 | 40000.0 -1985-07-09T00:00:00.000Z | 62405 | 60000.0 -1985-09-17T00:00:00.000Z | 49095 | 45000.0 -1985-10-14T00:00:00.000Z | 54329 | 50000.0 -1985-10-20T00:00:00.000Z | 48735 | 45000.0 -1985-11-19T00:00:00.000Z | 52833 | 50000.0 -1985-11-20T00:00:00.000Z | 33956 | 30000.0 -1985-11-20T00:00:00.000Z | 74999 | 70000.0 -1985-11-21T00:00:00.000Z | 56371 | 55000.0 -// end::bucket-result[] -; - -docsBucketNumeric#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -//tag::docsBucketNumeric[] -FROM employees -| EVAL bs = BUCKET(salary, 20, 25324, 74999) -| STATS COUNT(*) by bs -| SORT bs -//end::docsBucketNumeric[] -; - -//tag::docsBucketNumeric-result[] - COUNT(*):long | bs:double -9 |25000.0 -9 |30000.0 -18 |35000.0 -11 |40000.0 -11 |45000.0 -10 |50000.0 -7 |55000.0 -9 |60000.0 -8 |65000.0 -8 |70000.0 -//end::docsBucketNumeric-result[] -; - cos ROW a=2 | EVAL cos=COS(a); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 591aa3fbcc63f..966aa3225f953 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -8,7 +8,8 @@ synopsis:keyword "double atan(number:double|integer|long|unsigned_long)" "double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" "double avg(number:double|integer|long)" -"double|date bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" +"double|date bin(field:integer|long|double|date, buckets:integer|double|date_period|time_duration, ?from:integer|long|double|date, ?to:integer|long|double|date)" +"double|date bucket(field:integer|long|double|date, buckets:integer|double|date_period|time_duration, ?from:integer|long|double|date, ?to:integer|long|double|date)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" "boolean cidr_match(ip:ip, blockX...:keyword|text)" @@ -120,7 +121,8 @@ asin |number |"double|integer|long|unsigne atan |number 
|"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] avg |number |"double|integer|long" |[""] -bucket |[field, buckets, from, to] |["integer|long|double|date", integer, "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] +bin |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] +bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[, CIDR block to test the IP against.] @@ -233,7 +235,8 @@ asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. avg |The average of a numeric field. -bucket |Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. +bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. +bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. 
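// Aside (illustrative only, not part of this patch): the widened signature above
// gives BUCKET (and its new alias BIN) two calling conventions. In auto mode the
// span is derived from a bucket-count hint plus a range; in span mode it is given
// directly as a double, date_period or time_duration. A hedged sketch, assuming
// the same employees dataset the surrounding specs query:
//
//   FROM employees
//   | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
//   | STATS hires = COUNT(*) BY month = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
//   | SORT month
//
//   FROM employees
//   | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
//   | STATS hires = COUNT(*) BY month = BUCKET(hire_date, 1 month)
//   | SORT month
//
// Both forms should yield the monthly histogram that docsBucketMonthlyHistogram
// records (2, 1, 1, 1, 2, 4 hires across Feb, May, Jul, Sep, Oct and Nov 1985).
// Numeric fields follow the same pattern: BUCKET(salary, 20, 25324, 74999) in
// auto mode, or a literal double span such as bucket(emp_no, 5.), the form the
// Verifier tests later in this patch exercise.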
@@ -347,7 +350,8 @@ asin |double atan |double |false |false |false atan2 |double |[false, false] |false |false avg |double |false |false |true -bucket |"double|date" |[false, false, false, false]|false |false +bin |"double|date" |[false, false, true, true] |false |false +bucket |"double|date" |[false, false, true, true] |false |false case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false ceil |"double|integer|long|unsigned_long" |false |false |false cidr_match |boolean |[false, false] |true |false @@ -459,9 +463,9 @@ sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle // see https://github.com/elastic/elasticsearch/issues/102120 -countFunctions#[skip:-8.13.99] +countFunctions#[skip:-8.13.99, reason:BIN added] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -104 | 104 | 104 +105 | 105 | 105 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 2bf9259478032..fa524d270bb98 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -150,20 +150,6 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK ; -bucket#[skip:-8.13.99, reason:BUCKET renamed in 8.14] -FROM ul_logs -| WHERE @timestamp >= "2017-11-10T20:30:00Z" AND @timestamp < "2017-11-10T20:35:00Z" -| EVAL bh = bucket(bytes_in, 20, 5480608687137202404, 17764691215469285192) -| SORT @timestamp -| KEEP @timestamp, bytes_in, bh -; - -@timestamp:date | bytes_in:ul | bh:double -2017-11-10T20:32:57.000Z | 8420006392678593250 | 8.0E18 -2017-11-10T20:33:06.000Z | 5480608687137202404 | 5.0E18 -2017-11-10T20:34:43.000Z | 17764691215469285192 | 1.75E19 -; - toDegrees required_feature: esql.mv_warn diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index de6c3208df2ec..f55653c6800c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -140,7 +141,7 @@ else if (p.resolved()) { return; } checkFilterConditionType(p, failures); - checkAggregate(p, failures, aliases); + checkAggregate(p, failures); checkRegexExtractOnlyOnStrings(p, failures); checkRow(p, failures); @@ -160,7 +161,7 @@ else if (p.resolved()) { return failures; } - private static void checkAggregate(LogicalPlan p, Set failures, AttributeMap aliases) { + private static void checkAggregate(LogicalPlan p, Set failures) { if (p instanceof Aggregate agg) { List groupings = agg.groupings(); AttributeSet 
groupRefs = new AttributeSet(); @@ -170,6 +171,21 @@ private static void checkAggregate(LogicalPlan p, Set failures, Attribu e.forEachUp(g -> { if (g instanceof AggregateFunction af) { failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); + } else if (g instanceof GroupingFunction gf) { + gf.children() + .forEach( + c -> c.forEachDown( + GroupingFunction.class, + inner -> failures.add( + fail( + inner, + "cannot nest grouping functions; found [{}] inside [{}]", + inner.sourceText(), + gf.sourceText() + ) + ) + ) + ); } }); // keep the grouping attributes (common case) @@ -191,11 +207,16 @@ private static void checkAggregate(LogicalPlan p, Set failures, Attribu // traverse the tree to find invalid matches checkInvalidNamedExpressionUsage(exp, groupings, groupRefs, failures, 0); }); + } else { + p.forEachExpression( + GroupingFunction.class, + gf -> failures.add(fail(gf, "cannot use grouping function [{}] outside of a STATS command", gf.sourceText())) + ); } } // traverse the expression and look either for an agg function or a grouping match - // stop either when no children are left, the leaves are literals or a reference attribute is given + // stop either when no children are left, the leafs are literals or a reference attribute is given private static void checkInvalidNamedExpressionUsage( Expression e, List groups, @@ -208,13 +229,19 @@ private static void checkInvalidNamedExpressionUsage( af.field().forEachDown(AggregateFunction.class, f -> { failures.add(fail(f, "nested aggregations [{}] not allowed inside other aggregations [{}]", f, af)); }); + } else if (e instanceof GroupingFunction gf) { + // optimizer will later unroll expressions with aggs and non-aggs with a grouping function into an EVAL, but that will no longer + // be verified (by check above in checkAggregate()), so do it explicitly here + if (groups.stream().anyMatch(ex -> ex instanceof Alias a && a.child().semanticEquals(gf)) == false) { + failures.add(fail(gf, "can only use grouping function [{}] part of the BY clause", gf.sourceText())); + } else if (level == 0) { + addFailureOnGroupingUsedNakedInAggs(failures, gf, "function"); + } } else if (e.foldable()) { // don't do anything } else if (groups.contains(e) || groupRefs.contains(e)) { if (level == 0) { - failures.add( - fail(e, "grouping key [{}] cannot be used as an aggregate once declared in the STATS BY clause", e.sourceText()) - ); + addFailureOnGroupingUsedNakedInAggs(failures, e, "key"); } } // if a reference is found, mark it as an error @@ -246,6 +273,12 @@ else if (e instanceof NamedExpression ne) { } } + private static void addFailureOnGroupingUsedNakedInAggs(Set failures, Expression e, String element) { + failures.add( + fail(e, "grouping {} [{}] cannot be used as an aggregate once declared in the STATS BY clause", element, e.sourceText()) + ); + } + private static void checkRegexExtractOnlyOnStrings(LogicalPlan p, Set failures) { if (p instanceof RegexExtract re) { Expression expr = re.input(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 9ec0f4514d981..f7d737a82c279 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -18,6 +18,7 @@ import 
org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; +import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; @@ -50,7 +51,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -176,6 +176,8 @@ public EsqlFunctionRegistry() { private FunctionDefinition[][] functions() { return new FunctionDefinition[][] { + // grouping functions + new FunctionDefinition[] { def(Bucket.class, Bucket::new, "bucket", "bin"), }, // aggregate functions new FunctionDefinition[] { def(Avg.class, Avg::new, "avg"), @@ -195,7 +197,6 @@ private FunctionDefinition[][] functions() { def(Asin.class, Asin::new, "asin"), def(Atan.class, Atan::new, "atan"), def(Atan2.class, Atan2::new, "atan2"), - def(Bucket.class, Bucket::new, "bucket"), def(Ceil.class, Ceil::new, "ceil"), def(Cos.class, Cos::new, "cos"), def(Cosh.class, Cosh::new, "cosh"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java similarity index 59% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index ee78d012c421d..9b35bf1033586 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.expression.function.scalar.math; +package org.elasticsearch.xpack.esql.expression.function.grouping; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Rounding; @@ -15,46 +15,44 @@ import org.elasticsearch.xpack.esql.capabilities.Validatable; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.common.Failures; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Foldables; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.function.TwoOptionalArguments; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.List; -import java.util.function.BiFunction; import java.util.function.Function; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.expression.Validations.isFoldable; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FOURTH; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** - * Buckets dates into a given number of buckets. - *
    - * Takes a date field and three constants and picks a bucket size based on the - * constants. The constants are "target bucket count", "from", and "to". It looks like: - * {@code bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")}. - * We have a list of "human" bucket sizes like "one month" and "four hours". We pick - * the largest range that covers the range in fewer than the target bucket count. So - * in the above case we'll pick month long buckets, yielding 12 buckets. - *
    + * Splits dates and numbers into a given number of buckets. There are two ways to invoke + * this function: with a user-provided span (explicit invocation mode), or a span derived + * from a number of desired buckets (as a hint) and a range (auto mode). + * In the former case, two parameters will be provided, in the latter four. */ -public class Bucket extends EsqlScalarFunction implements Validatable { +public class Bucket extends GroupingFunction implements Validatable, TwoOptionalArguments { // TODO maybe we should just cover the whole of representable dates here - like ten years, 100 years, 1000 years, all the way up. // That way you never end up with more than the target number of buckets. private static final Rounding LARGEST_HUMAN_DATE_ROUNDING = Rounding.builder(Rounding.DateTimeUnit.YEAR_OF_CENTURY).build(); @@ -78,22 +76,24 @@ public class Bucket extends EsqlScalarFunction implements Validatable { Rounding.builder(TimeValue.timeValueMillis(10)).build(), Rounding.builder(TimeValue.timeValueMillis(1)).build(), }; + private static final ZoneId DEFAULT_TZ = ZoneOffset.UTC; // TODO: plug in the config + private final Expression field; private final Expression buckets; private final Expression from; private final Expression to; @FunctionInfo(returnType = { "double", "date" }, description = """ - Creates human-friendly buckets and returns a datetime value - for each row that corresponds to the resulting bucket the row falls into.""") + Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either + be provided directly, or chosen based on a recommended count and values range.""") public Bucket( Source source, @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, - @Param(name = "buckets", type = { "integer" }) Expression buckets, - @Param(name = "from", type = { "integer", "long", "double", "date" }) Expression from, - @Param(name = "to", type = { "integer", "long", "double", "date" }) Expression to + @Param(name = "buckets", type = { "integer", "double", "date_period", "time_duration" }) Expression buckets, + @Param(name = "from", type = { "integer", "long", "double", "date" }, optional = true) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date" }, optional = true) Expression to ) { - super(source, List.of(field, buckets, from, to)); + super(source, from != null && to != null ? 
List.of(field, buckets, from, to) : List.of(field, buckets)); this.field = field; this.buckets = buckets; this.from = from; @@ -102,28 +102,38 @@ public Bucket( @Override public boolean foldable() { - return field.foldable() && buckets.foldable() && from.foldable() && to.foldable(); + return field.foldable() && buckets.foldable() && (from == null || from.foldable()) && (to == null || to.foldable()); } @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - int b = ((Number) buckets.fold()).intValue(); - if (field.dataType() == DataTypes.DATETIME) { - long f = foldToLong(from); - long t = foldToLong(to); - return DateTrunc.evaluator( - source(), - toEvaluator.apply(field), - new DateRoundingPicker(b, f, t).pickRounding().prepareForUnknown() - ); + Rounding.Prepared preparedRounding; + if (buckets.dataType().isInteger()) { + int b = ((Number) buckets.fold()).intValue(); + long f = foldToLong(from); + long t = foldToLong(to); + preparedRounding = new DateRoundingPicker(b, f, t).pickRounding().prepareForUnknown(); + } else { + assert EsqlDataTypes.isTemporalAmount(buckets.dataType()) : "Unexpected span data type [" + buckets.dataType() + "]"; + preparedRounding = DateTrunc.createRounding(buckets.fold(), DEFAULT_TZ); + } + return DateTrunc.evaluator(source(), toEvaluator.apply(field), preparedRounding); } if (field.dataType().isNumeric()) { - double f = ((Number) from.fold()).doubleValue(); - double t = ((Number) to.fold()).doubleValue(); + double roundTo; + if (from != null) { + int b = ((Number) buckets.fold()).intValue(); + double f = ((Number) from.fold()).doubleValue(); + double t = ((Number) to.fold()).doubleValue(); + roundTo = pickRounding(b, f, t); + } else { + assert buckets.dataType().isRational() : "Unexpected rounding data type [" + buckets.dataType() + "]"; + roundTo = ((Number) buckets.fold()).doubleValue(); + } + Literal rounding = new Literal(source(), roundTo, DataTypes.DOUBLE); // We could make this more efficient, either by generating the evaluators with byte code or hand rolling this one. - Literal rounding = new Literal(source(), pickRounding(b, f, t), DataTypes.DOUBLE); Div div = new Div(source(), field, rounding); Floor floor = new Floor(source(), div); Mul mul = new Mul(source(), floor, rounding); @@ -170,30 +180,70 @@ private double pickRounding(int buckets, double from, double to) { return precise < halfPower ? halfPower : nextPowerOfTen; } + // supported parameter type combinations (1st, 2nd, 3rd, 4th): + // datetime, integer, string/datetime, string/datetime + // datetime, rounding/duration, -, - + // numeric, integer, numeric, numeric + // numeric, double, -, - @Override protected TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } + var fieldType = field.dataType(); + var bucketsType = buckets.dataType(); + if (fieldType == DataTypes.NULL || bucketsType == DataTypes.NULL) { + return TypeResolution.TYPE_RESOLVED; + } - if (field.dataType() == DataTypes.DATETIME) { - return resolveType((e, o) -> isStringOrDate(e, sourceText(), o)); + if (fieldType == DataTypes.DATETIME) { + TypeResolution resolution = isType( + buckets, + dt -> dt.isInteger() || EsqlDataTypes.isTemporalAmount(dt), + sourceText(), + SECOND, + "integral", + "date_period", + "time_duration" + ); + return bucketsType.isInteger() + ? 
resolution.and(checkArgsCount(4)) + .and(() -> isStringOrDate(from, sourceText(), THIRD)) + .and(() -> isStringOrDate(to, sourceText(), FOURTH)) + : resolution.and(checkArgsCount(2)); // temporal amount } - if (field.dataType().isNumeric()) { - return resolveType((e, o) -> isNumeric(e, sourceText(), o)); + if (fieldType.isNumeric()) { + return bucketsType.isInteger() + ? checkArgsCount(4).and(() -> isNumeric(from, sourceText(), THIRD)).and(() -> isNumeric(to, sourceText(), FOURTH)) + : isNumeric(buckets, sourceText(), SECOND).and(checkArgsCount(2)); } return isType(field, e -> false, sourceText(), FIRST, "datetime", "numeric"); } - private TypeResolution resolveType(BiFunction checkThirdAndForth) { - TypeResolution resolution = isInteger(buckets, sourceText(), SECOND); - if (resolution.unresolved()) { - return resolution; + private TypeResolution checkArgsCount(int expectedCount) { + String expected = null; + if (expectedCount == 2 && (from != null || to != null)) { + expected = "two"; + } else if (expectedCount == 4 && (from == null || to == null)) { + expected = "four"; + } else if ((from == null && to != null) || (from != null && to == null)) { + expected = "two or four"; } - return checkThirdAndForth.apply(from, THIRD).and(checkThirdAndForth.apply(to, FOURTH)); + + return expected == null + ? TypeResolution.TYPE_RESOLVED + : new TypeResolution( + format( + null, + "function expects exactly {} arguments when the first one is of type [{}] and the second of type [{}]", + expected, + field.dataType(), + buckets.dataType() + ) + ); } - public static TypeResolution isStringOrDate(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + private static TypeResolution isStringOrDate(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return TypeResolutions.isType( e, exp -> DataTypes.isString(exp) || DataTypes.isDateTime(exp), @@ -208,7 +258,9 @@ public static TypeResolution isStringOrDate(Expression e, String operationName, public void validate(Failures failures) { String operation = sourceText(); - failures.add(isFoldable(buckets, operation, SECOND)).add(isFoldable(from, operation, THIRD)).add(isFoldable(to, operation, FOURTH)); + failures.add(isFoldable(buckets, operation, SECOND)) + .add(from != null ? isFoldable(from, operation, THIRD) : null) + .add(to != null ? isFoldable(to, operation, FOURTH) : null); } private long foldToLong(Expression e) { @@ -226,7 +278,9 @@ public DataType dataType() { @Override public Expression replaceChildren(List newChildren) { - return new Bucket(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2), newChildren.get(3)); + Expression from = newChildren.size() > 2 ? newChildren.get(2) : null; + Expression to = newChildren.size() > 3 ? newChildren.get(3) : null; + return new Bucket(source(), newChildren.get(0), newChildren.get(1), from, to); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java new file mode 100644 index 0000000000000..61b04c5e51ace --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingFunction.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.grouping; + +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.Function; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; + +public abstract class GroupingFunction extends Function implements EvaluatorMapper { + + protected GroupingFunction(Source source, List fields) { + super(source, fields); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Override + public final ScriptTemplate asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 7e58a5b3fc8cf..c5d8865f32ceb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -38,6 +38,8 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; +import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; @@ -71,7 +73,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; @@ -382,7 +383,6 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Trim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), // ScalarFunction of(ScalarFunction.class, Atan2.class, PlanNamedTypes::writeAtan2, PlanNamedTypes::readAtan2), - of(ScalarFunction.class, Bucket.class, PlanNamedTypes::writeBucket, PlanNamedTypes::readBucket), of(ScalarFunction.class, Case.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, CIDRMatch.class, PlanNamedTypes::writeCIDRMatch, PlanNamedTypes::readCIDRMatch), of(ScalarFunction.class, Coalesce.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), @@ -421,6 +421,8 @@ public static List namedTypeEntries() { of(ArithmeticOperation.class, Mul.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), of(ArithmeticOperation.class, Div.class, PlanNamedTypes::writeArithmeticOperation, 
PlanNamedTypes::readArithmeticOperation), of(ArithmeticOperation.class, Mod.class, PlanNamedTypes::writeArithmeticOperation, PlanNamedTypes::readArithmeticOperation), + // GroupingFunctions + of(GroupingFunction.class, Bucket.class, PlanNamedTypes::writeBucket, PlanNamedTypes::readBucket), // AggregateFunctions of(AggregateFunction.class, Avg.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Count.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), @@ -1402,15 +1404,21 @@ static void writeAtan2(PlanStreamOutput out, Atan2 atan2) throws IOException { } static Bucket readBucket(PlanStreamInput in) throws IOException { - return new Bucket(in.readSource(), in.readExpression(), in.readExpression(), in.readExpression(), in.readExpression()); + return new Bucket( + in.readSource(), + in.readExpression(), + in.readExpression(), + in.readOptionalNamed(Expression.class), + in.readOptionalNamed(Expression.class) + ); } static void writeBucket(PlanStreamOutput out, Bucket bucket) throws IOException { out.writeSource(bucket.source()); out.writeExpression(bucket.field()); out.writeExpression(bucket.buckets()); - out.writeExpression(bucket.from()); - out.writeExpression(bucket.to()); + out.writeOptionalExpression(bucket.from()); + out.writeOptionalExpression(bucket.to()); } static final Map, ScalarFunction>> VARARG_CTORS = Map.ofEntries( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8275f76d9a55c..651ad2c548e55 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -121,6 +121,59 @@ public void testAggsInsideGrouping() { ); } + public void testGroupingInsideAggsAsAgg() { + assertEquals( + "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + error("from test| stats bucket(emp_no, 5.) by emp_no") + ); + assertEquals( + "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + error("from test| stats bucket(emp_no, 5.)") + ); + assertEquals( + "1:18: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + error("from test| stats bucket(emp_no, 5.) by bucket(emp_no, 6.)") + ); + assertEquals( + "1:22: can only use grouping function [bucket(emp_no, 5.)] part of the BY clause", + error("from test| stats 3 + bucket(emp_no, 5.) by bucket(emp_no, 6.)") + ); + } + + public void testGroupingInsideAggsAsGrouping() { + assertEquals( + "1:18: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats bucket(emp_no, 5.) by bucket(emp_no, 5.)") + ); + assertEquals( + "1:18: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats bucket(emp_no, 5.) by emp_no, bucket(emp_no, 5.)") + ); + assertEquals( + "1:18: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats bucket(emp_no, 5.) by x = bucket(emp_no, 5.)") + ); + assertEquals( + "1:22: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats z = bucket(emp_no, 5.) 
by x = bucket(emp_no, 5.)") + ); + assertEquals( + "1:22: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats y = bucket(emp_no, 5.) by y = bucket(emp_no, 5.)") + ); + assertEquals( + "1:22: grouping function [bucket(emp_no, 5.)] cannot be used as an aggregate once declared in the STATS BY clause", + error("from test| stats z = bucket(emp_no, 5.) by bucket(emp_no, 5.)") + ); + } + + public void testGroupingInsideGrouping() { + assertEquals( + "1:40: cannot nest grouping functions; found [bucket(emp_no, 5.)] inside [bucket(bucket(emp_no, 5.), 6.)]", + error("from test| stats max(emp_no) by bucket(bucket(emp_no, 5.), 6.)") + ); + } + public void testAggsWithInvalidGrouping() { assertEquals( "1:35: column [languages] cannot be used as an aggregate once declared in the STATS BY grouping key [l = languages % 3]", @@ -177,6 +230,21 @@ public void testGroupingInAggs() { """)); } + public void testBucketOnlyInAggs() { + assertEquals( + "1:23: cannot use grouping function [BUCKET(emp_no, 100.)] outside of a STATS command", + error("FROM test | WHERE ABS(BUCKET(emp_no, 100.)) > 0") + ); + assertEquals( + "1:22: cannot use grouping function [BUCKET(emp_no, 100.)] outside of a STATS command", + error("FROM test | EVAL 3 + BUCKET(emp_no, 100.)") + ); + assertEquals( + "1:18: cannot use grouping function [BUCKET(emp_no, 100.)] outside of a STATS command", + error("FROM test | SORT BUCKET(emp_no, 100.)") + ); + } + public void testDoubleRenamingField() { assertEquals( "1:44: Column [emp_no] renamed to [r1] and is no longer available [emp_no as r3]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 0b6c64679dc1f..5c3bc2b6b0350 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -90,6 +90,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -548,12 +549,13 @@ public static void testFunctionInfo() { for (int i = 0; i < args.size(); i++) { typesFromSignature.add(new HashSet<>()); } + Function typeName = dt -> dt.esType() != null ? dt.esType() : dt.typeName(); for (Map.Entry, DataType> entry : signatures().entrySet()) { List types = entry.getKey(); for (int i = 0; i < args.size() && i < types.size(); i++) { - typesFromSignature.get(i).add(signatureType(types.get(i))); + typesFromSignature.get(i).add(typeName.apply(types.get(i))); } - returnFromSignature.add(entry.getValue().esType()); + returnFromSignature.add(typeName.apply(entry.getValue())); } for (int i = 0; i < args.size(); i++) { @@ -573,10 +575,6 @@ public static void testFunctionInfo() { } - private static String signatureType(DataType type) { - return type.esType() != null ? type.esType() : type.typeName(); - } - /** * Adds cases with {@code null} and asserts that the result is {@code null}. *

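The numeric span branch added to Bucket.toEvaluator() above compiles down to
Mul(Floor(Div(field, span)), span). As a minimal standalone sketch (ours, not part
of the patch; the class and method names are invented for illustration), that is
plain round-down-to-a-multiple arithmetic, the same chain the reworked BucketTests
below assert through the "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[...]]"
expectation with a span literal of 50.0:

    // Illustrative only: numeric span bucketing rounds a value down to the
    // nearest multiple of the span, i.e. floor(value / span) * span.
    public final class NumericBucketSketch {

        /** Rounds value down to the nearest multiple of span. */
        static double bucket(double value, double span) {
            return Math.floor(value / span) * span;
        }

        public static void main(String[] args) {
            // 100 is the fixed test value used by numberCasesWithSpan below.
            System.out.println(bucket(100.0, 50.0)); // 100.0
            System.out.println(bucket(99.0, 50.0));  // 50.0
            System.out.println(bucket(-1.0, 50.0));  // -50.0 (floor, not truncation)
        }
    }

For datetime fields the same two-argument form routes through
DateTrunc.createRounding(span, UTC) instead, as the evaluator change earlier in
this patch shows.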
    diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java index dbb178e08bce5..cc2714dc31dca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/BucketTests.java @@ -15,12 +15,16 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.hamcrest.Matcher; +import java.time.Duration; +import java.time.Period; import java.util.ArrayList; import java.util.List; import java.util.function.LongSupplier; @@ -37,9 +41,26 @@ public BucketTests(@Name("TestCase") Supplier testCas public static Iterable parameters() { List suppliers = new ArrayList<>(); dateCases(suppliers, "fixed date", () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z")); + dateCasesWithSpan( + suppliers, + "fixed date with period", + () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-01-01T00:00:00.00Z"), + EsqlDataTypes.DATE_PERIOD, + Period.ofYears(1) + ); + dateCasesWithSpan( + suppliers, + "fixed date with duration", + () -> DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2023-02-17T09:00:00.00Z"), + EsqlDataTypes.TIME_DURATION, + Duration.ofDays(1L) + ); numberCases(suppliers, "fixed long", DataTypes.LONG, () -> 100L); + numberCasesWithSpan(suppliers, "fixed long with span", DataTypes.LONG, () -> 100L); numberCases(suppliers, "fixed int", DataTypes.INTEGER, () -> 100); + numberCasesWithSpan(suppliers, "fixed int with span", DataTypes.INTEGER, () -> 100); numberCases(suppliers, "fixed double", DataTypes.DOUBLE, () -> 100.0); + numberCasesWithSpan(suppliers, "fixed double with span", DataTypes.DOUBLE, () -> 100.); // TODO make errorsForCasesWithoutExamples do something sensible for 4+ parameters return parameterSuppliersFromTypedData( anyNullIsNull( @@ -86,6 +107,26 @@ private static TestCaseSupplier.TypedData dateBound(String name, DataType type, return new TestCaseSupplier.TypedData(value, type, name).forceLiteral(); } + private static void dateCasesWithSpan( + List suppliers, + String name, + LongSupplier date, + DataType spanType, + Object span + ) { + suppliers.add(new TestCaseSupplier(name, List.of(DataTypes.DATETIME, spanType), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(date.getAsLong(), DataTypes.DATETIME, "field")); + args.add(new TestCaseSupplier.TypedData(span, spanType, "buckets").forceLiteral()); + return new TestCaseSupplier.TestCase( + args, + "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]", + DataTypes.DATETIME, + dateResultsMatcher(args) + ); + })); + } + private static final DataType[] NUMBER_BOUNDS_TYPES = new DataType[] { DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE }; private static void 
numberCases(List suppliers, String name, DataType numberType, Supplier number) { @@ -131,6 +172,34 @@ private static TestCaseSupplier.TypedData numericBound(String name, DataType typ return new TestCaseSupplier.TypedData(v, type, name).forceLiteral(); } + private static void numberCasesWithSpan(List suppliers, String name, DataType numberType, Supplier number) { + suppliers.add(new TestCaseSupplier(name, List.of(numberType, DataTypes.DOUBLE), () -> { + List args = new ArrayList<>(); + args.add(new TestCaseSupplier.TypedData(number.get(), "field")); + args.add(new TestCaseSupplier.TypedData(50., DataTypes.DOUBLE, "span").forceLiteral()); + String attr = "Attribute[channel=0]"; + if (numberType == DataTypes.INTEGER) { + attr = "CastIntToDoubleEvaluator[v=" + attr + "]"; + } else if (numberType == DataTypes.LONG) { + attr = "CastLongToDoubleEvaluator[v=" + attr + "]"; + } + return new TestCaseSupplier.TestCase( + args, + "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=" + + attr + + ", " + + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]", + DataTypes.DOUBLE, + dateResultsMatcher(args) + ); + })); + + } + + private static TestCaseSupplier.TypedData keywordDateLiteral(String name, DataType type, String date) { + return new TestCaseSupplier.TypedData(date, type, name).forceLiteral(); + } + private static Matcher dateResultsMatcher(List typedData) { if (typedData.get(0).type() == DataTypes.DATETIME) { long millis = ((Number) typedData.get(0).data()).longValue(); @@ -141,7 +210,13 @@ private static Matcher dateResultsMatcher(List args) { - return new Bucket(source, args.get(0), args.get(1), args.get(2), args.get(3)); + Expression from = null; + Expression to = null; + if (args.size() > 2) { + from = args.get(2); + to = args.get(3); + } + return new Bucket(source, args.get(0), args.get(1), from, to); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 32e46ee544d07..579a998755eb7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3303,9 +3303,9 @@ public void testBucketAcceptsEvalLiteralReferences() { var plan = plan(""" from test | eval bucket_start = 1, bucket_end = 100000 - | eval bucket(salary, 10, bucket_start, bucket_end) + | stats by bucket(salary, 10, bucket_start, bucket_end) """); - var ab = as(plan, Eval.class); + var ab = as(plan, Limit.class); assertTrue(ab.optimized()); } @@ -3313,12 +3313,12 @@ public void testBucketFailsOnFieldArgument() { VerificationException e = expectThrows(VerificationException.class, () -> plan(""" from test | eval bucket_end = 100000 - | eval bucket(salary, 10, emp_no, bucket_end) + | stats by bucket(salary, 10, emp_no, bucket_end) """)); assertTrue(e.getMessage().startsWith("Found ")); final String header = "Found 1 problem\nline "; assertEquals( - "3:27: third argument of [bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", + "3:31: third argument of [bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", e.getMessage().substring(header.length()) ); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java index 0bbe663dab90e..7a0f685e65838 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/Expression.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.List; +import java.util.function.Supplier; /** * In a SQL statement, an Expression is whatever a user specifies inside an @@ -54,6 +55,10 @@ public TypeResolution and(TypeResolution other) { return failed ? this : other; } + public TypeResolution and(Supplier other) { + return failed ? this : other.get(); + } + public String message() { return message; } From 4413835b7f00e4eae59962d1d9dc3a75923b95dd Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 16 Apr 2024 13:10:28 +0200 Subject: [PATCH 043/130] Remove code remnants from semantic_text previous iterations in feature branch (#107508) Removes changes for code already removed from the `semantic_text` feature branch. This is prep work for opening up `semantic_text` PRs in order to remove irrelevant changes. --- .../xcontent/support/XContentMapValues.java | 2 +- .../index/mapper/InferenceModelFieldType.java | 21 --- .../inference/SemanticTextModelSettings.java | 91 ------------ .../xpack/ml/MachineLearning.java | 14 +- .../xpack/ml/SemanticTextFeature.java | 24 ---- .../ml/mapper/SemanticTextFieldMapper.java | 130 ------------------ .../mapper/SemanticTextFieldMapperTests.java | 118 ---------------- 7 files changed, 2 insertions(+), 398 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java delete mode 100644 server/src/main/java/org/elasticsearch/inference/SemanticTextModelSettings.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java delete mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 805931550ad62..f527b4cd8d684 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -555,7 +555,7 @@ public static Map nodeMapValue(Object node, String desc) { if (node instanceof Map) { return (Map) node; } else { - throw new ElasticsearchParseException(desc + " should be a hash but was of type: " + node.getClass()); + throw new ElasticsearchParseException(desc + " should be a map but was of type: " + node.getClass()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java deleted file mode 100644 index 490d7f36219cf..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/mapper/InferenceModelFieldType.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.index.mapper; - -/** - * Field type that uses an inference model. - */ -public interface InferenceModelFieldType { - /** - * Retrieve inference model used by the field type. - * - * @return model id used by the field type - */ - String getInferenceModel(); -} diff --git a/server/src/main/java/org/elasticsearch/inference/SemanticTextModelSettings.java b/server/src/main/java/org/elasticsearch/inference/SemanticTextModelSettings.java deleted file mode 100644 index 78773bfb72a95..0000000000000 --- a/server/src/main/java/org/elasticsearch/inference/SemanticTextModelSettings.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.inference; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * Model settings that are interesting for semantic_text inference fields. This class is used to serialize common - * ServiceSettings methods when building inference for semantic_text fields. - * - * @param taskType task type - * @param inferenceId inference id - * @param dimensions number of dimensions. May be null if not applicable - * @param similarity similarity used by the service. 
May be null if not applicable - */ -public record SemanticTextModelSettings( - TaskType taskType, - String inferenceId, - @Nullable Integer dimensions, - @Nullable SimilarityMeasure similarity -) { - - public static final String NAME = "model_settings"; - private static final ParseField TASK_TYPE_FIELD = new ParseField("task_type"); - private static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id"); - private static final ParseField DIMENSIONS_FIELD = new ParseField("dimensions"); - private static final ParseField SIMILARITY_FIELD = new ParseField("similarity"); - - public SemanticTextModelSettings(TaskType taskType, String inferenceId, Integer dimensions, SimilarityMeasure similarity) { - Objects.requireNonNull(taskType, "task type must not be null"); - Objects.requireNonNull(inferenceId, "inferenceId must not be null"); - this.taskType = taskType; - this.inferenceId = inferenceId; - this.dimensions = dimensions; - this.similarity = similarity; - } - - public SemanticTextModelSettings(Model model) { - this( - model.getTaskType(), - model.getInferenceEntityId(), - model.getServiceSettings().dimensions(), - model.getServiceSettings().similarity() - ); - } - - public static SemanticTextModelSettings parse(XContentParser parser) throws IOException { - return PARSER.apply(parser, null); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { - TaskType taskType = TaskType.fromString((String) args[0]); - String inferenceId = (String) args[1]; - Integer dimensions = (Integer) args[2]; - SimilarityMeasure similarity = args[3] == null ? null : SimilarityMeasure.fromString((String) args[2]); - return new SemanticTextModelSettings(taskType, inferenceId, dimensions, similarity); - }); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), TASK_TYPE_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFERENCE_ID_FIELD); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), DIMENSIONS_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), SIMILARITY_FIELD); - } - - public Map asMap() { - Map attrsMap = new HashMap<>(); - attrsMap.put(TASK_TYPE_FIELD.getPreferredName(), taskType.toString()); - attrsMap.put(INFERENCE_ID_FIELD.getPreferredName(), inferenceId); - if (dimensions != null) { - attrsMap.put(DIMENSIONS_FIELD.getPreferredName(), dimensions); - } - if (similarity != null) { - attrsMap.put(SIMILARITY_FIELD.getPreferredName(), similarity); - } - return Map.of(NAME, attrsMap); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 5ef7311179e4f..7fa2bcca952bf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -49,7 +49,6 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -69,7 +68,6 @@ import org.elasticsearch.plugins.CircuitBreakerPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.IngestPlugin; -import 
org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Plugin; @@ -365,7 +363,6 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory; import org.elasticsearch.xpack.ml.job.snapshot.upgrader.SnapshotUpgradeTaskExecutor; import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutor; -import org.elasticsearch.xpack.ml.mapper.SemanticTextFieldMapper; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; @@ -485,8 +482,7 @@ public class MachineLearning extends Plugin PersistentTaskPlugin, SearchPlugin, ShutdownAwarePlugin, - ExtensiblePlugin, - MapperPlugin { + ExtensiblePlugin { public static final String NAME = "ml"; public static final String BASE_PATH = "/_ml/"; // Endpoints that were deprecated in 7.x can still be called in 8.x using the REST compatibility layer @@ -2308,12 +2304,4 @@ public void signalShutdown(Collection shutdownNodeIds) { mlLifeCycleService.get().signalGracefulShutdown(shutdownNodeIds); } } - - @Override - public Map getMappers() { - if (SemanticTextFeature.isEnabled()) { - return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); - } - return Map.of(); - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java deleted file mode 100644 index f861760803e56..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * semantic_text feature flag. When the feature is complete, this flag will be removed. - */ -public class SemanticTextFeature { - - private SemanticTextFeature() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("semantic_text"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java deleted file mode 100644 index cf713546a071a..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.mapper; - -import org.apache.lucene.search.Query; -import org.elasticsearch.common.Strings; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.DocumentParserContext; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.InferenceModelFieldType; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperBuilderContext; -import org.elasticsearch.index.mapper.SimpleMappedFieldType; -import org.elasticsearch.index.mapper.SourceValueFetcher; -import org.elasticsearch.index.mapper.TextSearchInfo; -import org.elasticsearch.index.mapper.ValueFetcher; -import org.elasticsearch.index.query.SearchExecutionContext; - -import java.io.IOException; -import java.util.Map; - -/** - * A {@link FieldMapper} for semantic text fields. These fields have a model id reference, that is used for performing inference - * at ingestion and query time. - * For now, it is compatible with text expansion models only, but will be extended to support dense vector models as well. - * This field mapper performs no indexing, as inference results will be included as a different field in the document source, and will - * be indexed using a different field mapper. - */ -public class SemanticTextFieldMapper extends FieldMapper { - - public static final String CONTENT_TYPE = "semantic_text"; - - private static SemanticTextFieldMapper toType(FieldMapper in) { - return (SemanticTextFieldMapper) in; - } - - public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n), notInMultiFields(CONTENT_TYPE)); - - private SemanticTextFieldMapper(String simpleName, MappedFieldType mappedFieldType, CopyTo copyTo) { - super(simpleName, mappedFieldType, MultiFields.empty(), copyTo); - } - - @Override - public FieldMapper.Builder getMergeBuilder() { - return new Builder(simpleName()).init(this); - } - - @Override - protected void parseCreateField(DocumentParserContext context) throws IOException { - // Just parses text - no indexing is performed - context.parser().textOrNull(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - public SemanticTextFieldType fieldType() { - return (SemanticTextFieldType) super.fieldType(); - } - - public static class Builder extends FieldMapper.Builder { - - private final Parameter modelId = Parameter.stringParam("model_id", false, m -> toType(m).fieldType().modelId, null) - .addValidator(v -> { - if (Strings.isEmpty(v)) { - throw new IllegalArgumentException("field [model_id] must be specified"); - } - }); - - private final Parameter> meta = Parameter.metaParam(); - - public Builder(String name) { - super(name); - } - - @Override - protected Parameter[] getParameters() { - return new Parameter[] { modelId, meta }; - } - - @Override - public SemanticTextFieldMapper build(MapperBuilderContext context) { - return new SemanticTextFieldMapper(name(), new SemanticTextFieldType(name(), modelId.getValue(), meta.getValue()), copyTo); - } - } - - public static class SemanticTextFieldType extends SimpleMappedFieldType implements InferenceModelFieldType { - - private final String modelId; - - public SemanticTextFieldType(String name, String modelId, Map meta) { - super(name, false, false, false, TextSearchInfo.NONE, meta); - this.modelId = modelId; - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - 
public String getInferenceModel() { - return modelId; - } - - @Override - public Query termQuery(Object value, SearchExecutionContext context) { - throw new IllegalArgumentException("termQuery not implemented yet"); - } - - @Override - public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { - return SourceValueFetcher.toString(name(), context, format); - } - - @Override - public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { - throw new IllegalArgumentException("[semantic_text] fields do not support sorting, scripting or aggregating"); - } - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java deleted file mode 100644 index ccb8f106e4945..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.mapper; - -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MapperTestCase; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.junit.AssumptionViolatedException; - -import java.io.IOException; -import java.util.Collection; -import java.util.List; - -import static java.util.Collections.singletonList; -import static org.hamcrest.Matchers.containsString; - -public class SemanticTextFieldMapperTests extends MapperTestCase { - - public void testDefaults() throws Exception { - DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); - - ParsedDocument doc1 = mapper.parse(source(this::writeField)); - List fields = doc1.rootDoc().getFields("field"); - - // No indexable fields - assertTrue(fields.isEmpty()); - } - - public void testModelIdNotPresent() throws IOException { - Exception e = expectThrows( - MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text"))) - ); - assertThat(e.getMessage(), containsString("field [model_id] must be specified")); - } - - public void testCannotBeUsedInMultiFields() { - Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text"); - b.startObject("fields"); - b.startObject("semantic"); - b.field("type", "semantic_text"); - b.endObject(); - b.endObject(); - }))); - assertThat(e.getMessage(), containsString("Field [semantic] of type [semantic_text] can't be used in multifields")); - } - - public void testUpdatesToModelIdNotSupported() throws IOException { - MapperService mapperService = createMapperService( - 
fieldMapping(b -> b.field("type", "semantic_text").field("model_id", "test_model")) - ); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> merge(mapperService, fieldMapping(b -> b.field("type", "semantic_text").field("model_id", "another_model"))) - ); - assertThat(e.getMessage(), containsString("Cannot update parameter [model_id] from [test_model] to [another_model]")); - } - - @Override - protected Collection getPlugins() { - return singletonList(new MachineLearning(Settings.EMPTY)); - } - - @Override - protected void minimalMapping(XContentBuilder b) throws IOException { - b.field("type", "semantic_text").field("model_id", "test_model"); - } - - @Override - protected Object getSampleValueForDocument() { - return "value"; - } - - @Override - protected boolean supportsIgnoreMalformed() { - return false; - } - - @Override - protected boolean supportsStoredFields() { - return false; - } - - @Override - protected void registerParameters(ParameterChecker checker) throws IOException {} - - @Override - protected Object generateRandomInputValue(MappedFieldType ft) { - assumeFalse("doc_values are not supported in semantic_text", true); - return null; - } - - @Override - protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - throw new AssumptionViolatedException("not supported"); - } - - @Override - protected IngestScriptSupport ingestScriptSupport() { - throw new AssumptionViolatedException("not supported"); - } -} From 5ba6d1f321f697b0ab0e1ea487b12bbdb59fc2e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 16 Apr 2024 13:19:16 +0200 Subject: [PATCH 044/130] [Transform] Fix TransformRobustnessIT.testTransformLifecycleInALoop test (#107510) --- .../transform/integration/TransformRobustnessIT.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 4b7c42968f557..a311237b826fb 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -139,8 +139,14 @@ public void testContinuousTransformLifecycleInALoop() throws Exception { // Stop the transform with force set randomly. stopTransform(transformId, force); - // After the transform is stopped, there should be no transform task left. - assertThat(getTransformTasks(), is(empty())); + if (force) { + // If the "force" has been used, then the persistent task is removed from the cluster state but the local task can still + // be seen by the PersistentTasksNodeService. We need to wait until PersistentTasksNodeService reconciles the state. + assertBusy(() -> assertThat(getTransformTasks(), is(empty()))); + } else { + // If the "force" hasn't been used then we can expect the local task to be already gone. + assertThat(getTransformTasks(), is(empty())); + } assertThat(getTransformTasksFromClusterState(transformId), is(empty())); // Delete the transform. 
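The fix distilled: only a force-stop needs to poll, because removing the persistent task from the cluster state and removing the node-local task are not atomic. A minimal sketch of the waiting pattern (not the literal test code; it assumes the surrounding ESTestCase with its static assertBusy and the test's getTransformTasks() helper):

    import java.util.concurrent.TimeUnit;

    // After a force-stop the local task may outlive the cluster-state entry until
    // PersistentTasksNodeService reconciles, so retry the assertion with a bounded wait.
    private void awaitNoTransformTasks(boolean force) throws Exception {
        if (force) {
            assertBusy(() -> assertThat(getTransformTasks(), is(empty())), 30, TimeUnit.SECONDS);
        } else {
            // A graceful stop only returns once the local task is already gone.
            assertThat(getTransformTasks(), is(empty()));
        }
    }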
From e25f2458025c057cf6e46b1c7e893e190e43105e Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 16 Apr 2024 14:19:12 +0200 Subject: [PATCH 045/130] Validate versions for CCR snapshot on matching index version. (#107179) RestoreService will validate the snapshot based on the max index version. However, we do not increment the index version on every single release. To prevent attempting to restore an index of a future release, this change rejects the restore earlier, when building the snapshot info from a newer node matching the current index version. --- .../xpack/ccr/repository/CcrRepository.java | 33 +++++++++++++++++-- .../upgrades/CcrRollingUpgradeIT.java | 1 - 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 06e902caf0105..baf1509c73883 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ExceptionsHelper; @@ -47,6 +48,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.BuildVersion; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.engine.EngineException; @@ -77,6 +79,7 @@ import org.elasticsearch.repositories.blobstore.FileRestoreContext; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotDeleteListener; +import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotState; @@ -201,15 +204,41 @@ public void getSnapshotInfo( try { csDeduplicator.execute( new ThreadedActionListener<>(threadPool.executor(ThreadPool.Names.SNAPSHOT_META), listener.map(response -> { + Snapshot snapshot = new Snapshot(this.metadata.name(), SNAPSHOT_ID); + + // RestoreService will validate the snapshot is restorable based on the max index version. + // However, we do not increment the index version on every single release. + // To prevent attempting to restore an index of a future version, we reject the restore + // earlier, when building the snapshot info from newer nodes matching the current index version.
+ IndexVersion maxIndexVersion = response.getNodes().getMaxDataNodeCompatibleIndexVersion(); + if (IndexVersion.current().equals(maxIndexVersion)) { + for (var node : response.nodes()) { + if (node.canContainData() && node.getMaxIndexVersion().equals(maxIndexVersion)) { + // TODO: Revisit when looking into removing release version from DiscoveryNode + BuildVersion remoteVersion = BuildVersion.fromVersionId(node.getVersion().id); + if (remoteVersion.isFutureVersion()) { + throw new SnapshotException( + snapshot, + "the snapshot was created with version [" + + remoteVersion + + "] which is higher than the version of this node [" + + Build.current().version() + + "]" + ); + } + } + } + } + + Metadata responseMetadata = response.metadata(); Map<String, IndexMetadata> indicesMap = responseMetadata.indices(); consumer.accept( new SnapshotInfo( - new Snapshot(this.metadata.name(), SNAPSHOT_ID), + snapshot, List.copyOf(indicesMap.keySet()), List.copyOf(responseMetadata.dataStreams().keySet()), List.of(), - response.getNodes().getMaxDataNodeCompatibleIndexVersion(), + maxIndexVersion, SnapshotState.SUCCESS ) ); diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java index 68ebb43d607a1..e08660eb69206 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java @@ -205,7 +205,6 @@ public void testAutoFollowing() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104620") public void testCannotFollowLeaderInUpgradedCluster() throws Exception { if (upgradeState != UpgradeState.ALL) { return; From 984e793e445e31231cc0d8464d801fab8203aa08 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 16 Apr 2024 08:28:24 -0400 Subject: [PATCH 046/130] Add note about random sampler consistency (#107479) --- .../bucket/random-sampler-aggregation.asciidoc | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/reference/aggregations/bucket/random-sampler-aggregation.asciidoc b/docs/reference/aggregations/bucket/random-sampler-aggregation.asciidoc index efd025ed67ee6..9fc533104024d 100644 --- a/docs/reference/aggregations/bucket/random-sampler-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/random-sampler-aggregation.asciidoc @@ -94,6 +94,17 @@ higher sampling rates, the relative error is still low. NOTE: This represents the result of aggregations against a typical positively skewed APM data set which also has outliers in the upper tail. The linear dependence of the relative error on the sample size is found to hold widely, but the slope depends on the variation in the quantity being aggregated. As such, the variance in your own data may cause relative error rates to increase or decrease at a different rate. +[[random-sampler-consistency]] +==== Random sampler consistency + +For a given `probability` and `seed`, the random sampler aggregation is consistent when sampling unchanged data from the same shard. +However, because this is background random sampling, whether a particular document is included in the sampled set depends on the current number of segments. + +This means that replica and primary shards could return different values, as different documents are sampled on each.
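+
+For example, repeated requests that use the same `probability` and `seed` will sample the
+same documents as long as the shard's segments are unchanged. As a minimal sketch, assuming
+the server's `RandomSamplerAggregationBuilder` and its probability/seed setters (an
+illustration, not a definitive example):
+
+[source,java]
+----
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder;
+
+// A fixed probability and seed: re-running this request against an unchanged
+// shard samples the same documents and returns the same approximate statistics.
+RandomSamplerAggregationBuilder sampler = new RandomSamplerAggregationBuilder("my_sample");
+sampler.setProbability(0.1); // sample roughly 10% of the documents
+sampler.setSeed(42);         // a fixed seed keeps the sampled set stable
+sampler.subAggregation(AggregationBuilders.avg("avg_latency").field("latency"));
+----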
+ +If the shard changes via doc addition, update, deletion, or segment merging, the particular documents sampled could change, and thus the resulting statistics could change. + +The resulting statistics from the random sampler aggregation are approximate and should be treated as such. [[random-sampler-special-cases]] ==== Random sampling special cases @@ -105,6 +116,6 @@ for a bucket is `10,000` with `probability: 0.1`, the actual number of documents An exception to this is <>. Unique item counts are not suitable for automatic scaling. When interpreting the cardinality count, compare it -to the number of sampled docs provided in the top level `doc_count` within the random_sampler aggregation. It gives +to the number of sampled docs provided in the top level `doc_count` within the random_sampler aggregation. It gives you an idea of unique values as a percentage of total values. It may not reflect, however, the exact number of unique values for the given field. From a09ae3fdaebafc441b8e8067dd5346e26d888ad3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Tue, 16 Apr 2024 14:53:18 +0200 Subject: [PATCH 047/130] Refactor role descriptor parsing (#107430) The method for `RoleDescriptor` parsing is becoming more complex due to its usage being shared between API keys, file-based and native roles. In some cases we allow 2.X format, in others we disallow restrictions. Having a single method with multiple boolean flags that control inclusion/exclusion of fields is becoming hard to extend. This refactoring aims to allow easier introduction of new fields that should be conditionally supported in some cases but not others. One such case is the introduction of the `description` field, which should only be supported for file-based and native roles but not for roles embedded in API keys.
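In short, call sites now configure a parser once and reuse it. A sketch of the two main shapes, distilled from the diffs below (variable names are illustrative):

    // API key role descriptors: restrictions allowed, no 2.x field permissions
    RoleDescriptor.Parser apiKeyParser = RoleDescriptor.parserBuilder().allowRestriction(true).build();
    RoleDescriptor fromApiKey = apiKeyParser.parse(roleName, xContentParser);

    // Native roles read back from the security index: tolerate 2.x field permissions
    RoleDescriptor.Parser nativeParser = RoleDescriptor.parserBuilder().allow2xFormat(true).build();
    RoleDescriptor fromIndex = nativeParser.parse(roleName, sourceBytes, XContentType.JSON);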
Relates to: #107088 --- .../core/security/action/apikey/ApiKey.java | 3 +- .../BulkUpdateApiKeyRequestTranslator.java | 3 +- .../apikey/CreateApiKeyRequestBuilder.java | 5 +- .../apikey/UpdateApiKeyRequestTranslator.java | 3 +- .../action/role/PutRoleRequestBuilder.java | 7 +- .../authc/CrossClusterAccessSubjectInfo.java | 5 +- .../core/security/authz/RoleDescriptor.java | 202 ++++++++++-------- .../authz/RoleDescriptorsIntersection.java | 4 +- .../GetServiceAccountResponseTests.java | 9 +- .../security/authz/RoleDescriptorTests.java | 85 ++++---- ...RoleWithRemoteIndicesPrivilegesRestIT.java | 2 +- .../security/authc/ApiKeyIntegTests.java | 14 +- .../authc/apikey/ApiKeySingleNodeTests.java | 22 +- .../xpack/security/authc/ApiKeyService.java | 6 +- .../security/authz/store/FileRolesStore.java | 6 +- .../authz/store/NativeRolesStore.java | 7 +- .../action/apikey/RestGrantApiKeyAction.java | 5 +- .../test/TestSecurityClient.java | 2 +- .../security/authc/ApiKeyServiceTests.java | 5 +- .../permission/FieldPermissionsTests.java | 47 ++-- 20 files changed, 244 insertions(+), 198 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java index 0120ed09e8fe4..57cf816a46072 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java @@ -395,6 +395,7 @@ public String toString() { + "]"; } + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); static final ConstructingObjectParser PARSER; static { PARSER = new ConstructingObjectParser<>("api_key", true, ApiKey::new); @@ -419,7 +420,7 @@ static int initializeParser(AbstractObjectParser parser) { parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), new ParseField("metadata")); parser.declareNamedObjects(optionalConstructorArg(), (p, c, n) -> { p.nextToken(); - return RoleDescriptor.parse(n, p, false); + return ROLE_DESCRIPTOR_PARSER.parse(n, p); }, new ParseField("role_descriptors")); parser.declareField( optionalConstructorArg(), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTranslator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTranslator.java index 57a5848970b2e..d4fdb2d7f1028 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTranslator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/BulkUpdateApiKeyRequestTranslator.java @@ -26,8 +26,9 @@ public interface BulkUpdateApiKeyRequestTranslator { BulkUpdateApiKeyRequest translate(RestRequest request) throws IOException; class Default implements BulkUpdateApiKeyRequestTranslator { + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); private static final ConstructingObjectParser PARSER = createParser( - (n, p) -> RoleDescriptor.parse(n, p, false) + (n, p) -> ROLE_DESCRIPTOR_PARSER.parse(n, p) ); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java index a79b3c74db006..b8c4ab326fd34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CreateApiKeyRequestBuilder.java @@ -31,9 +31,8 @@ * Request builder for populating a {@link CreateApiKeyRequest} */ public class CreateApiKeyRequestBuilder extends ActionRequestBuilder { - private static final ConstructingObjectParser PARSER = createParser( - (n, p) -> RoleDescriptor.parse(n, p, false) - ); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private static final ConstructingObjectParser PARSER = createParser(ROLE_DESCRIPTOR_PARSER::parse); @SuppressWarnings("unchecked") public static ConstructingObjectParser createParser( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTranslator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTranslator.java index f70732dd50990..fa157224c79be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTranslator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTranslator.java @@ -25,7 +25,8 @@ public interface UpdateApiKeyRequestTranslator { UpdateApiKeyRequest translate(RestRequest request) throws IOException; class Default implements UpdateApiKeyRequestTranslator { - private static final ConstructingObjectParser PARSER = createParser((n, p) -> RoleDescriptor.parse(n, p, false)); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private static final ConstructingObjectParser PARSER = createParser(ROLE_DESCRIPTOR_PARSER::parse); @SuppressWarnings("unchecked") protected static ConstructingObjectParser createParser( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index f389a39df7979..e2da04bb61534 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -21,6 +21,8 @@ */ public class PutRoleRequestBuilder extends ActionRequestBuilder { + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + public PutRoleRequestBuilder(ElasticsearchClient client) { super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); } @@ -29,9 +31,8 @@ public PutRoleRequestBuilder(ElasticsearchClient client) { * Populate the put role request from the source and the role's name */ public PutRoleRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { - // we pass false as last parameter because we want to reject the request if field permissions - // are given in 2.x syntax - RoleDescriptor descriptor = RoleDescriptor.parse(name, source, false, xContentType, false); + // we want to reject the request if field 
permissions are given in 2.x syntax, hence we do not allow2xFormat + RoleDescriptor descriptor = ROLE_DESCRIPTOR_PARSER.parse(name, source, xContentType); assert name.equals(descriptor.getName()); request.name(name); request.cluster(descriptor.getClusterPrivileges()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java index b6b54846fc7c4..f91df320bb92d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/CrossClusterAccessSubjectInfo.java @@ -223,6 +223,9 @@ private void validate() { public static final class RoleDescriptorsBytes implements Writeable { public static final RoleDescriptorsBytes EMPTY = new RoleDescriptorsBytes(new BytesArray("{}")); + + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().build(); + private final BytesReference rawBytes; public RoleDescriptorsBytes(BytesReference rawBytes) { @@ -263,7 +266,7 @@ public Set toRoleDescriptors() { while (parser.nextToken() != XContentParser.Token.END_OBJECT) { parser.nextToken(); final String roleName = parser.currentName(); - roleDescriptors.add(RoleDescriptor.parse(roleName, parser, false)); + roleDescriptors.add(ROLE_DESCRIPTOR_PARSER.parse(roleName, parser)); } return Set.copyOf(roleDescriptors); } catch (IOException e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index ecbd12a7f4643..d1d24e2e4461e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -420,80 +420,112 @@ public void writeTo(StreamOutput out) throws IOException { } } - public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat, XContentType xContentType) - throws IOException { - return parse(name, source, allow2xFormat, xContentType, true); + public static Parser.Builder parserBuilder() { + return new Parser.Builder(); } - public static RoleDescriptor parse( - String name, - BytesReference source, - boolean allow2xFormat, - XContentType xContentType, - boolean allowRestriction - ) throws IOException { - assert name != null; - try (XContentParser parser = createParser(source, xContentType)) { - return parse(name, parser, allow2xFormat, allowRestriction); - } - } + public record Parser(boolean allow2xFormat, boolean allowRestriction) { - public static RoleDescriptor parse(String name, XContentParser parser, boolean allow2xFormat) throws IOException { - return parse(name, parser, allow2xFormat, true); - } + public static final class Builder { + private boolean allow2xFormat = false; + private boolean allowRestriction = false; + + private Builder() {} + + public Builder allow2xFormat(boolean allow2xFormat) { + this.allow2xFormat = allow2xFormat; + return this; + } + + public Builder allowRestriction(boolean allowRestriction) { + this.allowRestriction = allowRestriction; + return this; + } + + public Parser build() { + return new Parser(allow2xFormat, allowRestriction); + } - public static RoleDescriptor parse(String name, 
XContentParser parser, boolean allow2xFormat, boolean allowRestriction) - throws IOException { - // validate name - Validation.Error validationError = Validation.Roles.validateRoleName(name, true); - if (validationError != null) { - ValidationException ve = new ValidationException(); - ve.addValidationError(validationError.toString()); - throw ve; } - // advance to the START_OBJECT token if needed - XContentParser.Token token = parser.currentToken() == null ? parser.nextToken() : parser.currentToken(); - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException("failed to parse role [{}]. expected an object but found [{}] instead", name, token); + public RoleDescriptor parse(String name, BytesReference source, XContentType xContentType) throws IOException { + assert name != null; + try ( + XContentParser parser = XContentHelper.createParserNotCompressed( + LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, + source, + xContentType + ) + ) { + return parse(name, parser); + } } - String currentFieldName = null; - IndicesPrivileges[] indicesPrivileges = null; - RemoteIndicesPrivileges[] remoteIndicesPrivileges = null; - String[] clusterPrivileges = null; - List configurableClusterPrivileges = Collections.emptyList(); - ApplicationResourcePrivileges[] applicationPrivileges = null; - String[] runAsUsers = null; - Restriction restriction = null; - Map metadata = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (Fields.INDEX.match(currentFieldName, parser.getDeprecationHandler()) - || Fields.INDICES.match(currentFieldName, parser.getDeprecationHandler())) { - indicesPrivileges = parseIndices(name, parser, allow2xFormat); - } else if (Fields.RUN_AS.match(currentFieldName, parser.getDeprecationHandler())) { - runAsUsers = readStringArray(name, parser, true); - } else if (Fields.CLUSTER.match(currentFieldName, parser.getDeprecationHandler())) { - clusterPrivileges = readStringArray(name, parser, true); - } else if (Fields.APPLICATIONS.match(currentFieldName, parser.getDeprecationHandler()) - || Fields.APPLICATION.match(currentFieldName, parser.getDeprecationHandler())) { - applicationPrivileges = parseApplicationPrivileges(name, parser); - } else if (Fields.GLOBAL.match(currentFieldName, parser.getDeprecationHandler())) { - configurableClusterPrivileges = ConfigurableClusterPrivileges.parse(parser); - } else if (Fields.METADATA.match(currentFieldName, parser.getDeprecationHandler())) { - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException( - "expected field [{}] to be of type object, but found [{}] instead", - currentFieldName, - token - ); - } - metadata = parser.map(); - } else if (Fields.TRANSIENT_METADATA.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.START_OBJECT) { - // consume object but just drop - parser.map(); + + public RoleDescriptor parse(String name, XContentParser parser) throws IOException { + // validate name + Validation.Error validationError = Validation.Roles.validateRoleName(name, true); + if (validationError != null) { + ValidationException ve = new ValidationException(); + ve.addValidationError(validationError.toString()); + throw ve; + } + + // advance to the START_OBJECT token if needed + XContentParser.Token token = parser.currentToken() == null ? 
parser.nextToken() : parser.currentToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse role [{}]. expected an object but found [{}] instead", name, token); + } + String currentFieldName = null; + IndicesPrivileges[] indicesPrivileges = null; + RemoteIndicesPrivileges[] remoteIndicesPrivileges = null; + String[] clusterPrivileges = null; + List configurableClusterPrivileges = Collections.emptyList(); + ApplicationResourcePrivileges[] applicationPrivileges = null; + String[] runAsUsers = null; + Restriction restriction = null; + Map metadata = null; + String description = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (Fields.INDEX.match(currentFieldName, parser.getDeprecationHandler()) + || Fields.INDICES.match(currentFieldName, parser.getDeprecationHandler())) { + indicesPrivileges = parseIndices(name, parser, allow2xFormat); + } else if (Fields.RUN_AS.match(currentFieldName, parser.getDeprecationHandler())) { + runAsUsers = readStringArray(name, parser, true); + } else if (Fields.CLUSTER.match(currentFieldName, parser.getDeprecationHandler())) { + clusterPrivileges = readStringArray(name, parser, true); + } else if (Fields.APPLICATIONS.match(currentFieldName, parser.getDeprecationHandler()) + || Fields.APPLICATION.match(currentFieldName, parser.getDeprecationHandler())) { + applicationPrivileges = parseApplicationPrivileges(name, parser); + } else if (Fields.GLOBAL.match(currentFieldName, parser.getDeprecationHandler())) { + configurableClusterPrivileges = ConfigurableClusterPrivileges.parse(parser); + } else if (Fields.METADATA.match(currentFieldName, parser.getDeprecationHandler())) { + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException( + "expected field [{}] to be of type object, but found [{}] instead", + currentFieldName, + token + ); + } + metadata = parser.map(); + } else if (Fields.TRANSIENT_METADATA.match(currentFieldName, parser.getDeprecationHandler())) { + if (token == XContentParser.Token.START_OBJECT) { + // consume object but just drop + parser.map(); + } else { + throw new ElasticsearchParseException( + "failed to parse role [{}]. unexpected field [{}]", + name, + currentFieldName + ); + } + } else if (Fields.REMOTE_INDICES.match(currentFieldName, parser.getDeprecationHandler())) { + remoteIndicesPrivileges = parseRemoteIndices(name, parser); + } else if (allowRestriction && Fields.RESTRICTION.match(currentFieldName, parser.getDeprecationHandler())) { + restriction = Restriction.parse(name, parser); + } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { + // don't need it } else { throw new ElasticsearchParseException( "failed to parse role [{}]. unexpected field [{}]", @@ -501,28 +533,22 @@ public static RoleDescriptor parse(String name, XContentParser parser, boolean a currentFieldName ); } - } else if (Fields.REMOTE_INDICES.match(currentFieldName, parser.getDeprecationHandler())) { - remoteIndicesPrivileges = parseRemoteIndices(name, parser); - } else if (allowRestriction && Fields.RESTRICTION.match(currentFieldName, parser.getDeprecationHandler())) { - restriction = Restriction.parse(name, parser); - } else if (Fields.TYPE.match(currentFieldName, parser.getDeprecationHandler())) { - // don't need it - } else { - throw new ElasticsearchParseException("failed to parse role [{}]. 
unexpected field [{}]", name, currentFieldName); - } + } + return new RoleDescriptor( + name, + clusterPrivileges, + indicesPrivileges, + applicationPrivileges, + configurableClusterPrivileges.toArray(new ConfigurableClusterPrivilege[configurableClusterPrivileges.size()]), + runAsUsers, + metadata, + null, + remoteIndicesPrivileges, + restriction + ); + } - return new RoleDescriptor( - name, - clusterPrivileges, - indicesPrivileges, - applicationPrivileges, - configurableClusterPrivileges.toArray(new ConfigurableClusterPrivilege[configurableClusterPrivileges.size()]), - runAsUsers, - metadata, - null, - remoteIndicesPrivileges, - restriction - ); + } private static String[] readStringArray(String roleName, XContentParser parser, boolean allowNull) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java index bdfc87a06c922..446209b1d7ac3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorsIntersection.java @@ -26,6 +26,8 @@ public record RoleDescriptorsIntersection(Collection> roleDe public static RoleDescriptorsIntersection EMPTY = new RoleDescriptorsIntersection(Collections.emptyList()); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + public RoleDescriptorsIntersection(RoleDescriptor roleDescriptor) { this(List.of(Set.of(roleDescriptor))); } @@ -70,7 +72,7 @@ public static RoleDescriptorsIntersection fromXContent(XContentParser xContentPa while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, p); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, p.nextToken(), p); - roleDescriptors.add(RoleDescriptor.parse(p.currentName(), p, false)); + roleDescriptors.add(ROLE_DESCRIPTOR_PARSER.parse(p.currentName(), p)); } return Set.copyOf(roleDescriptors); }); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/service/GetServiceAccountResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/service/GetServiceAccountResponseTests.java index b473f64a3fbf9..b37b923ce96c5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/service/GetServiceAccountResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/service/GetServiceAccountResponseTests.java @@ -97,12 +97,9 @@ private void assertRoleDescriptorEquals(Map responseFragment, Ro @SuppressWarnings("unchecked") final Map descriptorMap = (Map) responseFragment.get("role_descriptor"); assertThat( - RoleDescriptor.parse( - roleDescriptor.getName(), - XContentTestUtils.convertToXContent(descriptorMap, XContentType.JSON), - false, - XContentType.JSON - ), + RoleDescriptor.parserBuilder() + .build() + .parse(roleDescriptor.getName(), XContentTestUtils.convertToXContent(descriptorMap, XContentType.JSON), XContentType.JSON), equalTo(roleDescriptor) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index 294ad7e975286..efa1dc2e29d10 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -162,13 +162,16 @@ public void testToXContentRoundtrip() throws Exception { final RoleDescriptor descriptor = randomRoleDescriptor(true, true, true); final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference xContentValue = toShuffledXContent(descriptor, xContentType, ToXContent.EMPTY_PARAMS, false); - final RoleDescriptor parsed = RoleDescriptor.parse(descriptor.getName(), xContentValue, false, xContentType); + final RoleDescriptor parsed = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .build() + .parse(descriptor.getName(), xContentValue, xContentType); assertThat(parsed, equalTo(descriptor)); } public void testParse() throws Exception { String q = "{\"cluster\":[\"a\", \"b\"]}"; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + RoleDescriptor rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(0, rd.getIndicesPrivileges().length); @@ -179,7 +182,7 @@ public void testParse() throws Exception { "cluster": [ "a", "b" ], "run_as": [ "m", "n" ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(0, rd.getIndicesPrivileges().length); @@ -246,7 +249,7 @@ public void testParse() throws Exception { "workflows": ["search_application_query"] } }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(3, rd.getIndicesPrivileges().length); @@ -274,7 +277,7 @@ public void testParse() throws Exception { } } }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(1, rd.getIndicesPrivileges().length); @@ -285,7 +288,7 @@ public void testParse() throws Exception { q = """ {"cluster":["a", "b"], "metadata":{"foo":"bar"}}"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(0, rd.getIndicesPrivileges().length); @@ -327,7 +330,7 @@ public void testParse() throws Exception { } } }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); 
assertThat(rd.getName(), equalTo("test")); assertThat(rd.getClusterPrivileges(), arrayContaining("a", "b")); assertThat(rd.getIndicesPrivileges().length, equalTo(1)); @@ -368,7 +371,7 @@ public void testParse() throws Exception { } } }"""; - rd = RoleDescriptor.parse("testUpdateProfile", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("testUpdateProfile", new BytesArray(q), XContentType.JSON); assertThat(rd.getName(), is("testUpdateProfile")); assertThat(rd.getClusterPrivileges(), arrayContaining("manage")); assertThat(rd.getIndicesPrivileges(), Matchers.emptyArray()); @@ -393,7 +396,7 @@ public void testParse() throws Exception { q = """ {"applications": [{"application": "myapp", "resources": ["*"], "privileges": ["login" ]}] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(q), XContentType.JSON); assertThat(rd.getName(), equalTo("test")); assertThat(rd.getClusterPrivileges(), emptyArray()); assertThat(rd.getIndicesPrivileges(), emptyArray()); @@ -407,26 +410,26 @@ public void testParse() throws Exception { {"applications":[{"not_supported": true, "resources": ["*"], "privileges": ["my-app:login" ]}] }"""; final IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> RoleDescriptor.parse("test", new BytesArray(badJson), false, XContentType.JSON) + () -> RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(badJson), XContentType.JSON) ); assertThat(ex.getMessage(), containsString("not_supported")); - rd = RoleDescriptor.parse("test_empty_restriction", new BytesArray(""" + rd = RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test_empty_restriction", new BytesArray(""" { "index": [{"names": "idx1", "privileges": [ "p1", "p2" ]}], "restriction":{} - }"""), false, XContentType.JSON); + }"""), XContentType.JSON); assertThat(rd.getName(), equalTo("test_empty_restriction")); assertThat(rd.hasRestriction(), equalTo(false)); assertThat(rd.hasWorkflowsRestriction(), equalTo(false)); final ElasticsearchParseException pex1 = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test_null_workflows", new BytesArray(""" + () -> RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test_null_workflows", new BytesArray(""" { "index": [{"names": ["idx1"], "privileges": [ "p1", "p2" ]}], "restriction":{"workflows":null} - }"""), false, XContentType.JSON) + }"""), XContentType.JSON) ); assertThat( pex1.getMessage(), @@ -438,11 +441,11 @@ public void testParse() throws Exception { final ElasticsearchParseException pex2 = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test_empty_workflows", new BytesArray(""" + () -> RoleDescriptor.parserBuilder().allowRestriction(true).build().parse("test_empty_workflows", new BytesArray(""" { "index": [{"names": ["idx1"], "privileges": [ "p1", "p2" ]}], "restriction":{"workflows":[]} - }"""), false, XContentType.JSON) + }"""), XContentType.JSON) ); assertThat( pex2.getMessage(), @@ -477,7 +480,7 @@ public void testParsingFieldPermissionsUsesCache() throws IOException { ] } """; - RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); final int numberOfFieldSecurityBlocks = 2; final Cache.CacheStats betweenStats = fieldPermissionsCache.getCacheStats(); @@ -486,7 
+489,7 @@ public void testParsingFieldPermissionsUsesCache() throws IOException { final int iterations = randomIntBetween(1, 5); for (int i = 0; i < iterations; i++) { - RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); } final Cache.CacheStats afterStats = fieldPermissionsCache.getCacheStats(); @@ -608,12 +611,13 @@ public void testParseRoleWithRestrictionFailsWhenAllowRestrictionIsFalse() { }"""; final ElasticsearchParseException e = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse( - "test_role_with_restriction", - XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON), - randomBoolean(), - false - ) + () -> RoleDescriptor.parserBuilder() + .allowRestriction(false) + .build() + .parse( + "test_role_with_restriction", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON) + ) ); assertThat( e, @@ -630,12 +634,13 @@ public void testParseRoleWithRestrictionWhenAllowRestrictionIsTrue() throws IOEx "workflows": ["search_application"] } }"""; - RoleDescriptor role = RoleDescriptor.parse( - "test_role_with_restriction", - XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON), - randomBoolean(), - true - ); + RoleDescriptor role = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .build() + .parse( + "test_role_with_restriction", + XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(json), XContentType.JSON) + ); assertThat(role.getName(), equalTo("test_role_with_restriction")); assertThat(role.hasRestriction(), equalTo(true)); assertThat(role.hasWorkflowsRestriction(), equalTo(true)); @@ -655,7 +660,7 @@ public void testParseEmptyQuery() throws Exception { } ] }"""; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + RoleDescriptor rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(1, rd.getIndicesPrivileges().length); @@ -677,7 +682,7 @@ public void testParseNullQuery() throws Exception { } ] }"""; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + RoleDescriptor rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(1, rd.getIndicesPrivileges().length); @@ -699,7 +704,7 @@ public void testParseEmptyQueryUsingDeprecatedIndicesField() throws Exception { } ] }"""; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + RoleDescriptor rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); assertEquals(1, rd.getIndicesPrivileges().length); @@ -721,7 +726,7 @@ public void testParseIgnoresTransientMetadata() throws Exception { ); XContentBuilder b = jsonBuilder(); descriptor.toXContent(b, ToXContent.EMPTY_PARAMS); - RoleDescriptor parsed = RoleDescriptor.parse("test", BytesReference.bytes(b), false, XContentType.JSON); + RoleDescriptor 
parsed = RoleDescriptor.parserBuilder().build().parse("test", BytesReference.bytes(b), XContentType.JSON); assertNotNull(parsed); assertEquals(1, parsed.getTransientMetadata().size()); assertEquals(true, parsed.getTransientMetadata().get("enabled")); @@ -745,7 +750,7 @@ public void testParseIndicesPrivilegesSucceedsWhenExceptFieldsIsSubsetOfGrantedF } ] }""", grant, except); - final RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + final RoleDescriptor rd = RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON); assertEquals("test", rd.getName()); assertEquals(1, rd.getIndicesPrivileges().length); assertArrayEquals(new String[] { "idx1", "idx2" }, rd.getIndicesPrivileges()[0].getIndices()); @@ -774,7 +779,7 @@ public void testParseIndicesPrivilegesFailsWhenExceptFieldsAreNotSubsetOfGranted }"""; final ElasticsearchParseException epe = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON) + () -> RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON) ); assertThat(epe, TestMatchers.throwableWithMessage(containsString("must be a subset of the granted fields "))); assertThat(epe, TestMatchers.throwableWithMessage(containsString("f1"))); @@ -794,7 +799,7 @@ public void testParseRemoteIndicesPrivilegesFailsWhenClustersFieldMissing() { }"""; final ElasticsearchParseException epe = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON) + () -> RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON) ); assertThat( epe, @@ -817,7 +822,7 @@ public void testParseIndicesPrivilegesFailsWhenClustersFieldPresent() { }"""; final ElasticsearchParseException epe = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON) + () -> RoleDescriptor.parserBuilder().build().parse("test", new BytesArray(json), XContentType.JSON) ); assertThat( epe, @@ -961,7 +966,7 @@ public void testGlobalPrivilegesOrdering() throws IOException { } } }""", profileNamesString, applicationNamesString); - RoleDescriptor role3 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + RoleDescriptor role3 = RoleDescriptor.parserBuilder().build().parse(roleName, new BytesArray(json), XContentType.JSON); assertThat(role3, is(role1)); json = Strings.format(""" { @@ -978,7 +983,7 @@ public void testGlobalPrivilegesOrdering() throws IOException { } } }""", applicationNamesString, profileNamesString); - RoleDescriptor role4 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + RoleDescriptor role4 = RoleDescriptor.parserBuilder().build().parse(roleName, new BytesArray(json), XContentType.JSON); assertThat(role4, is(role1)); } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java index 4e3a520678f70..d76902efc35b5 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java +++ 
b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java @@ -323,7 +323,7 @@ private void expectRoleDescriptorInResponse(final Response getRoleResponse, fina throws IOException { final Map actual = responseAsParser(getRoleResponse).map( HashMap::new, - p -> RoleDescriptor.parse(expectedRoleDescriptor.getName(), p, false) + p -> RoleDescriptor.parserBuilder().build().parse(expectedRoleDescriptor.getName(), p) ); assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 0a8fbb1ecffc0..b8f6551f36037 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -2874,12 +2874,14 @@ private void expectRoleDescriptorsForApiKey( for (RoleDescriptor expectedRoleDescriptor : expectedRoleDescriptors) { assertThat(rawRoleDescriptor, hasKey(expectedRoleDescriptor.getName())); final var descriptor = (Map) rawRoleDescriptor.get(expectedRoleDescriptor.getName()); - final var roleDescriptor = RoleDescriptor.parse( - expectedRoleDescriptor.getName(), - XContentTestUtils.convertToXContent(descriptor, XContentType.JSON), - false, - XContentType.JSON - ); + final var roleDescriptor = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .build() + .parse( + expectedRoleDescriptor.getName(), + XContentTestUtils.convertToXContent(descriptor, XContentType.JSON), + XContentType.JSON + ); assertEquals(expectedRoleDescriptor, roleDescriptor); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index fe9c1f37e7d49..f4a314c55acfc 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -247,12 +247,9 @@ public void testServiceAccountApiKey() throws IOException { @SuppressWarnings("unchecked") final Map descriptor = (Map) fleetServerRoleDescriptor.get("elastic/fleet-server"); - final RoleDescriptor roleDescriptor = RoleDescriptor.parse( - "elastic/fleet-server", - XContentTestUtils.convertToXContent(descriptor, XContentType.JSON), - false, - XContentType.JSON - ); + final RoleDescriptor roleDescriptor = RoleDescriptor.parserBuilder() + .build() + .parse("elastic/fleet-server", XContentTestUtils.convertToXContent(descriptor, XContentType.JSON), XContentType.JSON); assertThat(roleDescriptor, equalTo(ServiceAccountService.getServiceAccounts().get("elastic/fleet-server").roleDescriptor())); } @@ -588,12 +585,13 @@ public void testCreateCrossClusterApiKey() throws IOException { final Map roleDescriptors = (Map) document.get("role_descriptors"); assertThat(roleDescriptors.keySet(), contains("cross_cluster")); @SuppressWarnings("unchecked") - final RoleDescriptor actualRoleDescriptor = RoleDescriptor.parse( - "cross_cluster", - 
XContentTestUtils.convertToXContent((Map) roleDescriptors.get("cross_cluster"), XContentType.JSON), - false, - XContentType.JSON - ); + final RoleDescriptor actualRoleDescriptor = RoleDescriptor.parserBuilder() + .build() + .parse( + "cross_cluster", + XContentTestUtils.convertToXContent((Map) roleDescriptors.get("cross_cluster"), XContentType.JSON), + XContentType.JSON + ); final RoleDescriptor expectedRoleDescriptor = new RoleDescriptor( "cross_cluster", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index ffacd72b05abf..e4436d4fabe71 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -202,6 +202,8 @@ public class ApiKeyService { Property.NodeScope ); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build(); + private final Clock clock; private final Client client; private final SecurityIndexManager securityIndex; @@ -987,7 +989,7 @@ public List parseRoleDescriptors( XContentType.JSON ) ) { - return RoleDescriptor.parse(name, parser, false); + return ROLE_DESCRIPTOR_PARSER.parse(name, parser); } } catch (IOException e) { throw new UncheckedIOException(e); @@ -1027,7 +1029,7 @@ private static List parseRoleDescriptorsBytes( while (parser.nextToken() != XContentParser.Token.END_OBJECT) { parser.nextToken(); // role name String roleName = parser.currentName(); - roleDescriptors.add(RoleDescriptor.parse(roleName, parser, false)); + roleDescriptors.add(ROLE_DESCRIPTOR_PARSER.parse(roleName, parser)); } } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index d769e44f2d38d..368ec3825c0c2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -67,6 +67,7 @@ public class FileRolesStore implements BiConsumer, ActionListener, ActionListener< private static final Logger logger = LogManager.getLogger(NativeRolesStore.class); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allow2xFormat(true).build(); + private final Settings settings; private final Client client; private final XPackLicenseState licenseState; @@ -482,9 +484,8 @@ static RoleDescriptor transformRole(String id, BytesReference sourceBytes, Logge assert id.startsWith(ROLE_TYPE) : "[" + id + "] does not have role prefix"; final String name = id.substring(ROLE_TYPE.length() + 1); try { - // we pass true as allow2xFormat parameter because we do not want to reject permissions if the field permissions - // are given in 2.x syntax - RoleDescriptor roleDescriptor = RoleDescriptor.parse(name, sourceBytes, true, XContentType.JSON, false); + // we do not want to reject permissions if the field permissions are given in 2.x syntax, hence why we allow2xFormat + RoleDescriptor roleDescriptor = ROLE_DESCRIPTOR_PARSER.parse(name, sourceBytes, XContentType.JSON); final boolean dlsEnabled = 
Arrays.stream(roleDescriptor.getIndicesPrivileges()) .anyMatch(IndicesPrivileges::isUsingDocumentLevelSecurity); final boolean flsEnabled = Arrays.stream(roleDescriptor.getIndicesPrivileges()) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java index 8fda0f0518c93..572cf70586f7d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java @@ -50,7 +50,10 @@ public interface RequestTranslator { GrantApiKeyRequest translate(RestRequest request) throws IOException; class Default implements RequestTranslator { - private static final ObjectParser PARSER = createParser((n, p) -> RoleDescriptor.parse(n, p, false)); + private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder() + .allowRestriction(true) + .build(); + private static final ObjectParser PARSER = createParser(ROLE_DESCRIPTOR_PARSER::parse); protected static ObjectParser createParser( CheckedBiFunction roleDescriptorParser diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java index 4888c0f4c9721..e8eb50e3a6529 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -212,7 +212,7 @@ private Map getRoleDescriptors(String roleParameter) thr XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); final String roleName = parser.currentName(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - final RoleDescriptor role = RoleDescriptor.parse(roleName, parser, false); + final RoleDescriptor role = RoleDescriptor.parserBuilder().build().parse(roleName, parser); roles.put(roleName, role); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 9f060dd102a04..269031804f7e3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -1866,7 +1866,10 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru final String apiKey3 = randomAlphaOfLength(16); ApiKeyCredentials apiKeyCredentials3 = getApiKeyCredentials(docId3, apiKey3, type); final List keyRoles = List.of( - RoleDescriptor.parse("key-role", new BytesArray("{\"cluster\":[\"monitor\"]}"), true, XContentType.JSON) + RoleDescriptor.parserBuilder() + .allow2xFormat(true) + .build() + .parse("key-role", new BytesArray("{\"cluster\":[\"monitor\"]}"), XContentType.JSON) ); final Map metadata3 = mockKeyDocument( docId3, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsTests.java index 
b97d71466f181..fc5374fb324ac 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/permission/FieldPermissionsTests.java @@ -37,7 +37,8 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + RoleDescriptor.Parser roleParser = RoleDescriptor.parserBuilder().build(); + RoleDescriptor rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" }); assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" }); @@ -54,7 +55,7 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" }); assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" }); @@ -70,7 +71,7 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2" }); assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); @@ -86,7 +87,7 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); @@ -103,7 +104,7 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] {}); @@ -121,7 +122,7 @@ public void testParseFieldPermissions() throws Exception { }"""; ElasticsearchParseException e = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(exceptWithoutGrant), false, XContentType.JSON) + () -> roleParser.parse("test", new BytesArray(exceptWithoutGrant), XContentType.JSON) ); assertThat( e.getDetailedMessage(), @@ -130,10 +131,7 @@ public void testParseFieldPermissions() throws Exception { final String grantNull = """ {"indices": [ {"names": "idx2", "privileges": ["p3"], "field_security": {"grant": null}}]}"""; - e = expectThrows( - ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(grantNull), false, XContentType.JSON) - ); + e = expectThrows(ElasticsearchParseException.class, () -> roleParser.parse("test", new BytesArray(grantNull), XContentType.JSON)); assertThat( e.getDetailedMessage(), containsString("failed to parse indices privileges for" + " role [test]. 
grant must not be null.") @@ -141,10 +139,7 @@ public void testParseFieldPermissions() throws Exception { final String exceptNull = """ {"indices": [ {"names": "idx2", "privileges": ["p3"], "field_security": {"grant": ["*"],"except": null}}]}"""; - e = expectThrows( - ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(exceptNull), false, XContentType.JSON) - ); + e = expectThrows(ElasticsearchParseException.class, () -> roleParser.parse("test", new BytesArray(exceptNull), XContentType.JSON)); assertThat( e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. except must" + " not be null.") @@ -154,7 +149,7 @@ public void testParseFieldPermissions() throws Exception { {"indices": [ {"names": "idx2", "privileges": ["p3"], "field_security": {"grant": null,"except": null}}]}"""; e = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(exceptGrantNull), false, XContentType.JSON) + () -> roleParser.parse("test", new BytesArray(exceptGrantNull), XContentType.JSON) ); assertThat( e.getDetailedMessage(), @@ -165,7 +160,7 @@ public void testParseFieldPermissions() throws Exception { {"indices": [ {"names": "idx2", "privileges": ["p3"], "field_security": {}}]}"""; e = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(bothFieldsMissing), false, XContentType.JSON) + () -> roleParser.parse("test", new BytesArray(bothFieldsMissing), XContentType.JSON) ); assertThat( e.getDetailedMessage(), @@ -193,7 +188,7 @@ public void testParseFieldPermissions() throws Exception { } ] }"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); + rd = roleParser.parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {}); assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); assertArrayEquals(rd.getIndicesPrivileges()[1].getGrantedFields(), new String[] { "*" }); @@ -204,14 +199,14 @@ public void testParseFieldPermissions() throws Exception { public void testBWCFieldPermissions() throws Exception { String q = """ {"indices": [ {"names": "idx2", "privileges": ["p3"], "fields": ["f1", "f2"]}]}"""; - RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON); + RoleDescriptor rd = RoleDescriptor.parserBuilder().allow2xFormat(true).build().parse("test", new BytesArray(q), XContentType.JSON); assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2" }); assertNull(rd.getIndicesPrivileges()[0].getDeniedFields()); final String failingQuery = q; ElasticsearchParseException e = expectThrows( ElasticsearchParseException.class, - () -> RoleDescriptor.parse("test", new BytesArray(failingQuery), false, XContentType.JSON) + () -> RoleDescriptor.parserBuilder().allow2xFormat(false).build().parse("test", new BytesArray(failingQuery), XContentType.JSON) ); assertThat(e.getDetailedMessage(), containsString(""" ["fields": [...]] format has changed for field permissions in role [test], \ @@ -219,13 +214,16 @@ public void testBWCFieldPermissions() throws Exception { q = """ {"indices": [ {"names": "idx2", "privileges": ["p3"], "fields": []}]}"""; - rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON); + rd = RoleDescriptor.parserBuilder().allow2xFormat(true).build().parse("test", new BytesArray(q), XContentType.JSON); 
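The mechanical pattern behind all of these call-site rewrites, shown once side by side (a minimal sketch using a placeholder json payload, not a test taken from this patch):

    // Old entry point: behavior flags passed positionally on every call.
    RoleDescriptor before = RoleDescriptor.parse("test", new BytesArray(json), true, XContentType.JSON);

    // New entry point: flags are configured once on a reusable parser instance.
    RoleDescriptor.Parser roleParser = RoleDescriptor.parserBuilder()
        .allow2xFormat(true)      // accept the legacy 2.x field permissions syntax
        .allowRestriction(false)  // reject role payloads carrying a restriction block
        .build();
    RoleDescriptor after = roleParser.parse("test", new BytesArray(json), XContentType.JSON);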
        assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {});
        assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
        final String failingQuery2 = q;
        e = expectThrows(
            ElasticsearchParseException.class,
-           () -> RoleDescriptor.parse("test", new BytesArray(failingQuery2), false, XContentType.JSON)
+           () -> RoleDescriptor.parserBuilder()
+               .allow2xFormat(false)
+               .build()
+               .parse("test", new BytesArray(failingQuery2), XContentType.JSON)
        );
        assertThat(e.getDetailedMessage(), containsString("""
            ["fields": [...]] format has changed for field permissions in role [test], \
@@ -233,13 +231,16 @@ public void testBWCFieldPermissions() throws Exception {
        q = """
            {"indices": [ {"names": "idx2", "privileges": ["p3"], "fields": null}]}""";
-       rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON);
+       rd = RoleDescriptor.parserBuilder().allow2xFormat(true).build().parse("test", new BytesArray(q), XContentType.JSON);
        assertNull(rd.getIndicesPrivileges()[0].getGrantedFields());
        assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
        final String failingQuery3 = q;
        e = expectThrows(
            ElasticsearchParseException.class,
-           () -> RoleDescriptor.parse("test", new BytesArray(failingQuery3), false, XContentType.JSON)
+           () -> RoleDescriptor.parserBuilder()
+               .allow2xFormat(false)
+               .build()
+               .parse("test", new BytesArray(failingQuery3), XContentType.JSON)
        );
        assertThat(e.getDetailedMessage(), containsString("""
            ["fields": [...]] format has changed for field permissions in role [test], \

From 82c5eb0e269b849edd4a7595dd97cc97bfa60ec3 Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Tue, 16 Apr 2024 15:56:53 +0300
Subject: [PATCH 048/130] Mute SnapshotStatusApisIT.testInfiniteTimeout (#107531)

Related to #107405

---
 .../java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java
index 9ed0f981ef666..f49e46e9b1971 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java
@@ -689,6 +689,7 @@ public void testConcurrentCreateAndStatusAPICalls() throws Exception {
         }
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107405")
     public void testInfiniteTimeout() throws Exception {
         createRepository("test-repo", "mock");
         createIndex("test-idx", 1, 0);

From af8e4bf26d15af49824cdb94fcd149f8a984b48e Mon Sep 17 00:00:00 2001
From: Mark Tozzi
Date: Tue, 16 Apr 2024 09:47:25 -0400
Subject: [PATCH 049/130] [ES|QL] Moving argument compatibility checking for Equals (#105217)

Continuing my work from #104490, this PR moves the parameter compatibility
checking for Equals into the type resolution check. This is a somewhat bigger
change than for Add, as there was no ES|QL base class for binary comparison
operators before this. I've added EsqlBinaryComparison as that base class, and
migrated all of the binary comparisons to be based off of that (except for
NullEquals, see note below).

In order to maintain compatibility with the current behavior, I've kept it so
that unsigned longs are only interoperable with other unsigned longs. We've
talked a lot about changing that, and I consider this work a prerequisite for
that.
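As a concrete sketch of the restriction kept above (illustrative only: ulField
and intField stand in for arbitrary expressions of type UNSIGNED_LONG and
INTEGER):

    // Mixing unsigned_long with any other numeric type now fails during type
    // resolution, with the message built by formatIncompatibleTypesMessage().
    Equals mixed = new Equals(Source.EMPTY, ulField, intField);
    assert mixed.typeResolved().unresolved();
    // "first argument of [...] is [unsigned_long] and second is [integer].
    //  [unsigned_long] can only be operated on together with another [unsigned_long]"

    // Two unsigned_longs still resolve and compare exactly as before.
    Equals same = new Equals(Source.EMPTY, ulField, ulField);
    assert same.typeResolved().unresolved() == false;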
I've also added a bunch of test cases to Equals and NotEquals, which should
have the side effect of filling out the type support table in the equals docs.
As noted in the comments, I'll have follow-up PRs for the other binary
comparisons to add tests, but this PR is already too long.

Note about NullEquals: There is an ES|QL NullEquals class, which inherits from
the QL version, but I don't think it works. I didn't see any tests or docs for
it, and trying it out in the demo instance gave me a syntax error. I think we
need to delve into what's going on there, but this PR isn't the right place
for it.
---
 .../src/main/resources/conditional.csv-spec | 7 +
 .../predicate/operator/comparison/Equals.java | 47 +++--
 .../comparison/EsqlBinaryComparison.java | 164 +++++++++++++++
 .../operator/comparison/GreaterThan.java | 33 ++-
 .../comparison/GreaterThanOrEqual.java | 34 +++-
 .../operator/comparison/LessThan.java | 36 ++--
 .../operator/comparison/LessThanOrEqual.java | 31 ++-
 .../operator/comparison/NotEquals.java | 82 +++++---
 .../DateTimeArithmeticOperation.java | 8 +-
 .../arithmetic/EsqlArithmeticOperation.java | 23 ++-
 .../function/AbstractFunctionTestCase.java | 49 +++++
 .../expression/function/TestCaseSupplier.java | 84 ++++++--
 .../operator/arithmetic/AddTests.java | 8 +-
 .../operator/comparison/EqualsTests.java | 188 +++++++++++++++---
 .../comparison/GreaterThanOrEqualTests.java | 21 +-
 .../operator/comparison/GreaterThanTests.java | 21 +-
 .../comparison/LessThanOrEqualTests.java | 20 +-
 .../operator/comparison/LessThanTests.java | 20 +-
 .../operator/comparison/NotEqualsTests.java | 187 ++++++++++++++---
 .../esql/optimizer/OptimizerRulesTests.java | 2 +-
 20 files changed, 825 insertions(+), 240 deletions(-)
 create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java

diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec
index f574722f691e5..64a8c1d9da316 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec
@@ -156,6 +156,9 @@ nullOnMultivaluesComparisonOperation
 required_feature: esql.disable_nullable_opts
 ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL;
+warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value
+
 a:integer | b:integer | same:boolean
 5 | [1, 2] | null
@@ -166,6 +169,8 @@ notNullOnMultivaluesComparisonOperation
 required_feature: esql.disable_nullable_opts
 ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL;
+warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded.
+warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value a:integer | b:integer | same:boolean ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java index 9fb899b8e36df..62eec13af008a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java @@ -8,33 +8,48 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; + +public class Equals extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.BOOLEAN, EqualsBoolsEvaluator.Factory::new), + Map.entry(DataTypes.INTEGER, EqualsIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, EqualsDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, EqualsLongsEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, EqualsKeywordsEvaluator.Factory::new) + ); -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - -public class Equals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals { public Equals(Source source, Expression left, Expression right) { - super(source, left, right); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, evaluatorMap); } public Equals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); - } - - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, 
zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, Equals::new, left(), right(), zoneId()); } @@ -48,6 +63,11 @@ public Equals swapLeftAndRight() { return new Equals(source(), right(), left(), zoneId()); } + @Override + public BinaryComparison reverse() { + return this; + } + @Override public BinaryComparison negate() { return new NotEquals(source(), left(), right(), zoneId()); @@ -82,4 +102,5 @@ static boolean processBools(boolean lhs, boolean rhs) { static boolean processGeometries(BytesRef lhs, BytesRef rhs) { return lhs.equals(rhs); } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java new file mode 100644 index 0000000000000..58a808893c4c6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.ZoneId; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; + +public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { + + private final Map evaluatorMap; + + protected EsqlBinaryComparison( + Source source, + Expression left, + Expression right, + /* TODO: BinaryComparisonOperator is an enum with a bunch of functionality we don't really want. We should extract an interface and + create a symbol only version like we did for BinaryArithmeticOperation. Ideally, they could be the same class. 
+ */ + BinaryComparisonProcessor.BinaryComparisonOperation operation, + Map evaluatorMap + ) { + this(source, left, right, operation, null, evaluatorMap); + } + + protected EsqlBinaryComparison( + Source source, + Expression left, + Expression right, + BinaryComparisonProcessor.BinaryComparisonOperation operation, + // TODO: We are definitely not doing the right thing with this zoneId + ZoneId zoneId, + Map evaluatorMap + ) { + super(source, left, right, operation, zoneId); + this.evaluatorMap = evaluatorMap; + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + // Our type is always boolean, so figure out the evaluator type from the inputs + DataType commonType = EsqlDataTypeRegistry.INSTANCE.commonType(left().dataType(), right().dataType()); + EvalOperator.ExpressionEvaluator.Factory lhs; + EvalOperator.ExpressionEvaluator.Factory rhs; + + if (commonType.isNumeric()) { + lhs = Cast.cast(source(), left().dataType(), commonType, toEvaluator.apply(left())); + rhs = Cast.cast(source(), right().dataType(), commonType, toEvaluator.apply(right())); + } else { + lhs = toEvaluator.apply(left()); + rhs = toEvaluator.apply(right()); + } + + if (evaluatorMap.containsKey(commonType) == false) { + throw new EsqlIllegalArgumentException("Unsupported type " + left().dataType()); + } + return evaluatorMap.get(commonType).apply(source(), lhs, rhs); + } + + @Override + public Boolean fold() { + return (Boolean) EvaluatorMapper.super.fold(); + } + + @Override + protected TypeResolution resolveType() { + TypeResolution typeResolution = super.resolveType(); + if (typeResolution.unresolved()) { + return typeResolution; + } + + return checkCompatibility(); + } + + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return TypeResolutions.isType( + e, + evaluatorMap::containsKey, + sourceText(), + paramOrdinal, + evaluatorMap.keySet().stream().map(DataType::typeName).toArray(String[]::new) + ); + } + + /** + * Check if the two input types are compatible for this operation + * + * @return TypeResolution.TYPE_RESOLVED iff the types are compatible. Otherwise, an appropriate type resolution error. + */ + protected TypeResolution checkCompatibility() { + DataType leftType = left().dataType(); + DataType rightType = right().dataType(); + + // Unsigned long is only interoperable with other unsigned longs + if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataTypes.NULL))) + || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataTypes.NULL)))) { + return new TypeResolution(formatIncompatibleTypesMessage()); + } + + if ((leftType.isNumeric() && rightType.isNumeric()) + || (DataTypes.isString(leftType) && DataTypes.isString(rightType)) + || leftType.equals(rightType) + || DataTypes.isNull(leftType) + || DataTypes.isNull(rightType)) { + return TypeResolution.TYPE_RESOLVED; + } + return new TypeResolution(formatIncompatibleTypesMessage()); + } + + public String formatIncompatibleTypesMessage() { + if (left().dataType().equals(UNSIGNED_LONG)) { + return format( + null, + "first argument of [{}] is [unsigned_long] and second is [{}]. " + + "[unsigned_long] can only be operated on together with another [unsigned_long]", + sourceText(), + right().dataType().typeName() + ); + } + if (right().dataType().equals(UNSIGNED_LONG)) { + return format( + null, + "first argument of [{}] is [{}] and second is [unsigned_long]. 
" + + "[unsigned_long] can only be operated on together with another [unsigned_long]", + sourceText(), + left().dataType().typeName() + ); + } + return format( + null, + "first argument of [{}] is [{}] so second argument must also be [{}] but was [{}]", + sourceText(), + left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), + left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), + right().dataType().typeName() + ); + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java index 5683a9d0d7e85..3eca0e858acbf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java @@ -8,29 +8,42 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class GreaterThan extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, GreaterThanIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, GreaterThanDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, GreaterThanKeywordsEvaluator.Factory::new) + ); -public class GreaterThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan { - public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + public GreaterThan(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GT, evaluatorMap); } - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, 
BinaryComparisonProcessor.BinaryComparisonOperation.GT, zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, GreaterThan::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java index ebb29998fb995..f99a85420870b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java @@ -8,30 +8,42 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class GreaterThanOrEqual extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, GreaterThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, GreaterThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, GreaterThanOrEqualKeywordsEvaluator.Factory::new) + ); -public class GreaterThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual { - - public GreaterThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + public GreaterThanOrEqual(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, evaluatorMap); } - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + public GreaterThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected 
NodeInfo info() { return NodeInfo.create(this, GreaterThanOrEqual::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java index 12f54270b65dc..6b82df1d67da6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java @@ -8,38 +8,44 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class LessThan extends EsqlBinaryComparison implements Negatable { -public class LessThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, LessThanIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, LessThanDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, LessThanKeywordsEvaluator.Factory::new) + ); public LessThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LT, zoneId, evaluatorMap); } @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, LessThan::new, left(), right(), zoneId()); } @Override - protected org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan replaceChildren( - Expression newLeft, - Expression newRight - ) { + protected LessThan replaceChildren(Expression newLeft, Expression newRight) { return new LessThan(source(), newLeft, newRight, zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java index e75733a9e2340..ac6a92aaf097b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java @@ -8,29 +8,38 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class LessThanOrEqual extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, LessThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, LessThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, LessThanOrEqualKeywordsEvaluator.Factory::new) + ); -public class LessThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual { public LessThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LTE, zoneId, evaluatorMap); } @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, LessThanOrEqual::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java index 6fbed572cdc01..9c931ec7433eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java @@ -8,45 +8,44 @@ import org.apache.lucene.util.BytesRef; 
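The NotEquals rewrite below also keeps the algebraic helpers the optimizer
relies on; a brief sketch, with a and b standing in for arbitrary resolved
expressions:

    NotEquals ne = new NotEquals(Source.EMPTY, a, b);
    BinaryComparison negated = ne.negate();      // Equals(a, b)
    NotEquals swapped = ne.swapLeftAndRight();   // NotEquals(b, a), same truth value
    BinaryComparison reversed = ne.reverse();    // != is symmetric, so this is ne itself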
import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class NotEquals extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.BOOLEAN, NotEqualsBoolsEvaluator.Factory::new), + Map.entry(DataTypes.INTEGER, NotEqualsIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, NotEqualsDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, NotEqualsLongsEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, NotEqualsKeywordsEvaluator.Factory::new) + ); -public class NotEquals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals { - public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); - } - - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); - } - - @Override - protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { - return new NotEquals(source(), newLeft, newRight, zoneId()); + public NotEquals(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, evaluatorMap); } - @Override - public NotEquals swapLeftAndRight() { - return new NotEquals(source(), right(), left(), zoneId()); - } - - @Override - public BinaryComparison negate() { - return new Equals(source(), left(), right(), zoneId()); + public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, zoneId, evaluatorMap); } @Evaluator(extraName = "Ints") @@ -78,4 +77,29 @@ static boolean 
processBools(boolean lhs, boolean rhs) { static boolean processGeometries(BytesRef lhs, BytesRef rhs) { return false == lhs.equals(rhs); } + + @Override + public BinaryComparison reverse() { + return this; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); + } + + @Override + protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { + return new NotEquals(source(), newLeft, newRight, zoneId()); + } + + @Override + public NotEquals swapLeftAndRight() { + return new NotEquals(source(), right(), left(), zoneId()); + } + + @Override + public BinaryComparison negate() { + return new Equals(source(), left(), right(), zoneId()); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 0f550862ed1fa..a45707a0197d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -43,10 +43,10 @@ interface DatetimeArithmeticEvaluator { Expression left, Expression right, OperationSymbol op, - ArithmeticEvaluator ints, - ArithmeticEvaluator longs, - ArithmeticEvaluator ulongs, - ArithmeticEvaluator doubles, + BinaryEvaluator ints, + BinaryEvaluator longs, + BinaryEvaluator ulongs, + BinaryEvaluator doubles, DatetimeArithmeticEvaluator datetimes ) { super(source, left, right, op, ints, longs, ulongs, doubles); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 22f5798e5b1c4..ba283bc4d877b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -71,14 +71,15 @@ public String symbol() { } /** Arithmetic (quad) function. 
*/ - interface ArithmeticEvaluator { + @FunctionalInterface + public interface BinaryEvaluator { ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory lhs, ExpressionEvaluator.Factory rhs); } - private final ArithmeticEvaluator ints; - private final ArithmeticEvaluator longs; - private final ArithmeticEvaluator ulongs; - private final ArithmeticEvaluator doubles; + private final BinaryEvaluator ints; + private final BinaryEvaluator longs; + private final BinaryEvaluator ulongs; + private final BinaryEvaluator doubles; private DataType dataType; @@ -87,10 +88,10 @@ interface ArithmeticEvaluator { Expression left, Expression right, OperationSymbol op, - ArithmeticEvaluator ints, - ArithmeticEvaluator longs, - ArithmeticEvaluator ulongs, - ArithmeticEvaluator doubles + BinaryEvaluator ints, + BinaryEvaluator longs, + BinaryEvaluator ulongs, + BinaryEvaluator doubles ) { super(source, left, right, op); this.ints = ints; @@ -139,7 +140,7 @@ protected TypeResolution checkCompatibility() { return TypeResolution.TYPE_RESOLVED; } - static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { + public static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { return format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol, leftType.typeName(), rightType.typeName()); } @@ -152,7 +153,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function errorsForCasesWithoutExamples( return suppliers; } + public static String errorMessageStringForBinaryOperators( + boolean includeOrdinal, + List> validPerPosition, + List types + ) { + try { + return typeErrorMessage(includeOrdinal, validPerPosition, types); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is from the combination + if (types.get(0).equals(DataTypes.UNSIGNED_LONG)) { + return "first argument of [] is [unsigned_long] and second is [" + + types.get(1).typeName() + + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + + } + if (types.get(1).equals(DataTypes.UNSIGNED_LONG)) { + return "first argument of [] is [" + + types.get(0).typeName() + + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + } + return "first argument of [] is [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] so second argument must also be [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] but was [" + + types.get(1).typeName() + + "]"; + + } + } + /** * Adds test cases containing unsupported parameter types that immediately fail. 
*/ @@ -931,6 +962,24 @@ protected static String typeErrorMessage(boolean includeOrdinal, List types) { return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", ")); } + public static List stringCases( + BinaryOperator expected, + BiFunction evaluatorToString, + List warnings, + DataType expectedType + ) { + List lhsSuppliers = new ArrayList<>(); + List rhsSuppliers = new ArrayList<>(); + List suppliers = new ArrayList<>(); + for (DataType type : AbstractConvertFunction.STRING_TYPES) { + lhsSuppliers.addAll(stringCases(type)); + rhsSuppliers.addAll(stringCases(type)); + casesCrossProduct(expected, lhsSuppliers, rhsSuppliers, evaluatorToString, warnings, suppliers, expectedType, true); + } + return suppliers; + } + @Override public TestCase get() { TestCase supplied = supplier.get(); @@ -258,14 +275,14 @@ public static List castToDoubleSuppliersFromRange(Double Min, return suppliers; } - public record NumericTypeTestConfig(Number min, Number max, BinaryOperator expected, String evaluatorName) {} + public record NumericTypeTestConfig(Number min, Number max, BiFunction expected, String evaluatorName) {} - public record NumericTypeTestConfigs( - NumericTypeTestConfig intStuff, - NumericTypeTestConfig longStuff, - NumericTypeTestConfig doubleStuff + public record NumericTypeTestConfigs( + NumericTypeTestConfig intStuff, + NumericTypeTestConfig longStuff, + NumericTypeTestConfig doubleStuff ) { - public NumericTypeTestConfig get(DataType type) { + public NumericTypeTestConfig get(DataType type) { if (type == DataTypes.INTEGER) { return intStuff; } @@ -312,8 +329,8 @@ public static List getSuppliersForNumericType(DataType type, throw new IllegalArgumentException("bogus numeric type [" + type + "]"); } - public static List forBinaryWithWidening( - NumericTypeTestConfigs typeStuff, + public static List forBinaryComparisonWithWidening( + NumericTypeTestConfigs typeStuff, String lhsName, String rhsName, BiFunction> warnings, @@ -325,7 +342,45 @@ public static List forBinaryWithWidening( for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); - NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]"; + casesCrossProduct( + (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), + getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), + getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), + evaluatorToString, + warnings, + suppliers, + DataTypes.BOOLEAN, + true + ); + } + } + return suppliers; + } + + public static List forBinaryWithWidening( + NumericTypeTestConfigs typeStuff, + String lhsName, + String rhsName, + List warnings + ) { + List suppliers = new ArrayList<>(); + List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + + for (DataType lhsType : numericTypes) { + for (DataType rhsType : numericTypes) { + DataType expected = widen(lhsType, rhsType); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + "[" + lhsName @@ -885,7 +940,7 @@ public static List 
doubleCases(double min, double max, boolea return cases; } - private static List booleanCases() { + public static List booleanCases() { return List.of( new TypedDataSupplier("", () -> true, DataTypes.BOOLEAN), new TypedDataSupplier("", () -> false, DataTypes.BOOLEAN) @@ -1267,9 +1322,14 @@ public Matcher evaluatorToString() { * exists because we can't generate random values from the test parameter generation functions, and instead need to return * suppliers which generate the random values at test execution time. */ - public record TypedDataSupplier(String name, Supplier supplier, DataType type) { + public record TypedDataSupplier(String name, Supplier supplier, DataType type, boolean forceLiteral) { + + public TypedDataSupplier(String name, Supplier supplier, DataType type) { + this(name, supplier, type, false); + } + public TypedData get() { - return new TypedData(supplier.get(), type, name); + return new TypedData(supplier.get(), type, name, forceLiteral); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index c40d037890d53..2596959c449db 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -43,20 +43,20 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); suppliers.addAll( TestCaseSupplier.forBinaryWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> l.intValue() + r.intValue(), "AddIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> l.longValue() + r.longValue(), "AddLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> l.doubleValue() + r.doubleValue(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 0a1e9bdfaf34b..6fcc4235f5b79 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -11,52 +11,188 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; +import 
org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class EqualsTests extends AbstractBinaryComparisonTestCase { +public class EqualsTests extends AbstractFunctionTestCase { public EqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int == Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() == r.intValue(), + "EqualsIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() == r.longValue(), + "EqualsLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() == r.doubleValue(), + "EqualsDoublesEvaluator" + ) ), - "EqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + List.of() + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsLongsEvaluator", + "lhs", + "rhs", + Object::equals, DataTypes.BOOLEAN, - equalTo(lhs == rhs) - ); - }))); - } + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsBoolsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.booleanCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsKeywordsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsKeywordsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of() + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? 
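+ // Likely no cast is needed because datetime values are represented as epoch-milli longs at the block level, so the longs evaluator applies to them directly.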
+ suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsLongsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of() + ) + ); - @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.equals(rhs)); - } + suppliers.addAll( + TestCaseSupplier.stringCases( + Object::equals, + (lhsType, rhsType) -> "EqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new Equals(source, lhs, rhs); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.geoPointCases(), + TestCaseSupplier.geoPointCases(), + List.of() + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.geoShapeCases(), + TestCaseSupplier.geoShapeCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianPointCases(), + TestCaseSupplier.cartesianPointCases(), + List.of() + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianShapeCases(), + TestCaseSupplier.cartesianShapeCases(), + List.of() + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override - protected boolean isEquality() { - return true; + protected Expression build(Source source, List args) { + return new Equals(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index ad8dba7d63065..f45dedff837c4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -11,26 +11,25 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanOrEqualTests extends AbstractBinaryComparisonTestCase { +public class GreaterThanOrEqualTests extends AbstractFunctionTestCase { public 
GreaterThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { + // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int >= Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -47,17 +46,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) >= 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new GreaterThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new GreaterThanOrEqual(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index b631a742f7885..e872af5b7c772 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -11,26 +11,25 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanTests extends AbstractBinaryComparisonTestCase { +public class GreaterThanTests extends AbstractFunctionTestCase { public GreaterThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { + // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int > Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -47,17 +46,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) > 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new GreaterThan(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new GreaterThan(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 7864a0dda9fe3..8bba0c4a5afb5 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -11,20 +11,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanOrEqualTests extends AbstractBinaryComparisonTestCase { +public class LessThanOrEqualTests extends AbstractFunctionTestCase { public LessThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,17 +45,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) <= 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new LessThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new LessThanOrEqual(source, args.get(0), args.get(1), null); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index 826e88551077d..ab726dc51fbe4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -11,20 +11,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanTests extends AbstractBinaryComparisonTestCase { +public class LessThanTests extends AbstractFunctionTestCase { public LessThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,17 +45,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) < 0); - } - - @Override - 
protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new LessThan(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new LessThan(source, args.get(0), args.get(1), null); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index 0d6bb32fe2488..d6ee5806e0c9e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -11,53 +11,182 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class NotEqualsTests extends AbstractBinaryComparisonTestCase { +public class NotEqualsTests extends AbstractFunctionTestCase { public NotEqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int != Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() != r.intValue(), + "NotEqualsIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() != r.longValue(), + "NotEqualsLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() != r.doubleValue(), + "NotEqualsDoublesEvaluator" + ) ), - "NotEqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + List.of() + ) + ); + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsLongsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), 
DataTypes.BOOLEAN, - equalTo(lhs != rhs) - ); - }))); - } - - @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(false == lhs.equals(rhs)); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new NotEquals(source, lhs, rhs, ZoneOffset.UTC); + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsBoolsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.booleanCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of() + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsLongsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> false == l.equals(r), + (lhsType, rhsType) -> "NotEqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.geoPointCases(), + TestCaseSupplier.geoPointCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.geoShapeCases(), + TestCaseSupplier.geoShapeCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianPointCases(), + TestCaseSupplier.cartesianPointCases(), + List.of() + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianShapeCases(), + TestCaseSupplier.cartesianShapeCases(), + List.of() + ) + ); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override - protected boolean isEquality() { - return true; + protected Expression build(Source source, List args) { + return new NotEquals(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 01fcd222a5141..28944252191be 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -495,7 +495,7 @@ public void testPropagateEquals_VarEq2OrVarNeq5() { // a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { FieldAttribute fa = getFieldAttribute(); - org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals eq = equalsOf(fa, TWO); + Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, THREE, false, FOUR, false); GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); From 69e3c22d0039b5380701e2e5e30298859a0b1cf4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 16 Apr 2024 15:57:30 +0200 Subject: [PATCH 050/130] [Transform] Prevent the ResourceNotFoundException from being thrown on force-stop (#107520) --- .../integration/TransformRobustnessIT.java | 45 ++++++++ .../action/TransportStopTransformAction.java | 24 ++-- .../TransportStopTransformActionTests.java | 107 ++++++++++++++++++ 3 files changed, 169 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index a311237b826fb..9c993b9dcb7d8 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -113,6 +113,51 @@ public void testBatchTransformLifecycltInALoop() throws IOException { } } + public void testInterruptedBatchTransformLifecycltInALoop() throws IOException { + createReviewsIndex(); + + String transformId = "test_interrupted_batch_lifecycle_in_a_loop"; + String destIndex = transformId + "-dest"; + for (int i = 0; i < 100; ++i) { + long sleepAfterStartMillis = randomLongBetween(0, 1_000); + boolean force = randomBoolean(); + try { + // Create the batch transform. + createPivotReviewsTransform(transformId, destIndex, null); + assertThat(getTransformTasks(), is(empty())); + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + startTransform(transformId); + // There is 1 transform task after start. + assertThat(getTransformTasks(), hasSize(1)); + assertThat(getTransformTasksFromClusterState(transformId), hasSize(1)); + + Thread.sleep(sleepAfterStartMillis); + + // Stop the transform with force set randomly. + stopTransform(transformId, force); + // After the transform is stopped, there should be no transform task left. + if (force) { + // If the "force" has been used, then the persistent task is removed from the cluster state but the local task can still + // be seen by the PersistentTasksNodeService. We need to wait until PersistentTasksNodeService reconciles the state. + assertBusy(() -> assertThat(getTransformTasks(), is(empty()))); + } else { + // If the "force" hasn't been used then we can expect the local task to be already gone. + assertThat(getTransformTasks(), is(empty())); + } + assertThat(getTransformTasksFromClusterState(transformId), is(empty())); + + // Delete the transform. 
+ deleteTransform(transformId); + } catch (AssertionError | Exception e) { + throw new AssertionError( + format("Failure at iteration %d (sleepAfterStart=%sms,force=%s): %s", i, sleepAfterStartMillis, force, e.getMessage()), + e + ); + } + } + } + public void testContinuousTransformLifecycleInALoop() throws Exception { createReviewsIndex(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index 1996012ccdf58..6868948bb6f0a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -165,6 +165,7 @@ protected void doExecute(Task task, Request request, ActionListener li ); final ActionListener doExecuteListener = cancelTransformTasksListener( + persistentTasksService, transformNodeAssignments.getWaitingForAssignment(), finalListener ); @@ -173,9 +174,8 @@ protected void doExecute(Task task, Request request, ActionListener li // When force==true, we **do not** fan out to individual tasks (i.e. taskOperation method will not be called) as we // want to make sure that the persistent tasks will be removed from cluster state even if these tasks are no longer // visible by the PersistentTasksService. - cancelTransformTasksListener(transformNodeAssignments.getAssigned(), doExecuteListener).onResponse( - new Response(true) - ); + cancelTransformTasksListener(persistentTasksService, transformNodeAssignments.getAssigned(), doExecuteListener) + .onResponse(new Response(true)); } else if (transformNodeAssignments.getExecutorNodes().isEmpty()) { doExecuteListener.onResponse(new Response(true)); } else { @@ -195,6 +195,7 @@ protected void doExecute(Task task, Request request, ActionListener li // found transforms without a config } else if (request.isForce()) { final ActionListener doExecuteListener = cancelTransformTasksListener( + persistentTasksService, transformNodeAssignments.getWaitingForAssignment(), finalListener ); @@ -488,6 +489,7 @@ private void waitForTransformStopped( })); } + // Visible for testing /** * Creates and returns the listener that sends remove request for every task in the given set. 
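+ * A task that no longer exists is treated as already removed, so a ResourceNotFoundException from the remove request is not propagated.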
* @@ -495,7 +497,8 @@ private void waitForTransformStopped( * @param finalListener listener that should be called once all the given tasks are removed * @return listener that removes given tasks in parallel */ - private ActionListener cancelTransformTasksListener( + static ActionListener cancelTransformTasksListener( + final PersistentTasksService persistentTasksService, final Set transformTasks, final ActionListener finalListener ) { @@ -505,16 +508,23 @@ private ActionListener cancelTransformTasksListener( return ActionListener.wrap(response -> { GroupedActionListener> groupedListener = new GroupedActionListener<>( transformTasks.size(), - ActionListener.wrap(r -> finalListener.onResponse(response), finalListener::onFailure) + ActionListener.wrap(unused -> finalListener.onResponse(response), finalListener::onFailure) ); for (String taskId : transformTasks) { - persistentTasksService.sendRemoveRequest(taskId, null, groupedListener); + persistentTasksService.sendRemoveRequest(taskId, null, ActionListener.wrap(groupedListener::onResponse, e -> { + // If we are about to remove a persistent task which does not exist, treat it as success. + if (e instanceof ResourceNotFoundException) { + groupedListener.onResponse(null); + return; + } + groupedListener.onFailure(e); + })); } }, e -> { GroupedActionListener> groupedListener = new GroupedActionListener<>( transformTasks.size(), - ActionListener.wrap(r -> finalListener.onFailure(e), finalListener::onFailure) + ActionListener.wrap(unused -> finalListener.onFailure(e), finalListener::onFailure) ); for (String taskId : transformTasks) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java index 59959edc7232a..08e0982b2ab84 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java @@ -8,24 +8,50 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.persistent.PersistentTaskResponse; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.persistent.RemovePersistentTaskAction; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; +import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import 
org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.List; +import java.util.Set; import static org.elasticsearch.rest.RestStatus.CONFLICT; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportStopTransformActionTests extends ESTestCase { @@ -198,4 +224,85 @@ public void testBuildException() { assertThat(statusException.getSuppressed().length, equalTo(0)); } + public void testCancelTransformTasksListener_NoTasks() { + StopTransformAction.Response responseTrue = new StopTransformAction.Response(true); + + PersistentTasksService persistentTasksService = mock(PersistentTasksService.class); + Set transformTasks = Set.of(); + ActionListener listener = Mockito.>mock(); + + ActionListener cancelTransformTasksListener = TransportStopTransformAction + .cancelTransformTasksListener(persistentTasksService, transformTasks, listener); + cancelTransformTasksListener.onResponse(responseTrue); + verify(listener, times(1)).onResponse(responseTrue); + } + + public void testCancelTransformTasksListener_ThreeTasksRemovedSuccessfully() { + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + + // We treat NotFound as a successful removal of the task + doAnswer(randomBoolean() ? withResponse() : withException(new ResourceNotFoundException("task not found"))).when(client) + .execute(same(RemovePersistentTaskAction.INSTANCE), any(), any()); + + PersistentTasksService persistentTasksService = new PersistentTasksService(mock(ClusterService.class), threadPool, client); + Set transformTasks = Set.of("task-A", "task-B", "task-C"); + ActionListener listener = Mockito.>mock(); + + StopTransformAction.Response responseTrue = new StopTransformAction.Response(true); + ActionListener cancelTransformTasksListener = TransportStopTransformAction + .cancelTransformTasksListener(persistentTasksService, transformTasks, listener); + cancelTransformTasksListener.onResponse(responseTrue); + + verify(listener).onResponse(responseTrue); + } + + public void testCancelTransformTasksListener_OneTaskCouldNotBeRemoved() { + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + + doAnswer(randomBoolean() ? withResponse() : withException(new ResourceNotFoundException("task not found"))).when(client) + .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-A")), any()); + doAnswer(randomBoolean() ? 
withResponse() : withException(new ResourceNotFoundException("task not found"))).when(client) + .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-B")), any()); + doAnswer(withException(new IllegalStateException("real issue while removing task"))).when(client) + .execute(same(RemovePersistentTaskAction.INSTANCE), eq(new RemovePersistentTaskAction.Request("task-C")), any()); + + PersistentTasksService persistentTasksService = new PersistentTasksService(mock(ClusterService.class), threadPool, client); + Set transformTasks = Set.of("task-A", "task-B", "task-C"); + ActionListener listener = Mockito.>mock(); + + StopTransformAction.Response responseTrue = new StopTransformAction.Response(true); + ActionListener cancelTransformTasksListener = TransportStopTransformAction + .cancelTransformTasksListener(persistentTasksService, transformTasks, listener); + cancelTransformTasksListener.onResponse(responseTrue); + + ArgumentCaptor exceptionArgumentCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener, times(1)).onFailure(exceptionArgumentCaptor.capture()); + Exception actualException = exceptionArgumentCaptor.getValue(); + assertThat(actualException.getMessage(), containsString("real issue while removing task")); + assertThat(actualException.getSuppressed(), is(emptyArray())); + } + + private static Answer withResponse() { + return invocationOnMock -> { + @SuppressWarnings("unchecked") + var l = (ActionListener) invocationOnMock.getArguments()[2]; + l.onResponse(new PersistentTaskResponse((PersistentTasksCustomMetadata.PersistentTask) null)); + return null; + }; + } + + private static Answer withException(Exception e) { + return invocationOnMock -> { + @SuppressWarnings("unchecked") + var l = (ActionListener) invocationOnMock.getArguments()[2]; + l.onFailure(e); + return null; + }; + } } From 7e18768bbf8dd5f54a2efeb8a0119c471a777158 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 16 Apr 2024 16:22:28 +0200 Subject: [PATCH 051/130] Adjust log level for publish failure in TransportPutShutdownNodeAction (#107489) Publish failures are - to a certain degree - expected as the master might change before the corresponding task is executed. This reduces logging to INFO level for publish failures that will be retried by TransportMasterNodeAction. 
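In other words: a failure that the surrounding machinery will retry on its own is routine and logged at INFO, while anything else still surfaces at ERROR. A minimal sketch of that rule in isolation (the helper class and method below are illustrative only, not part of this change; they assume log4j's Logger and the MasterService.isPublishFailureException predicate that the patch uses):

    import org.apache.logging.log4j.Logger;

    import org.elasticsearch.cluster.service.MasterService;

    final class FailureLogging {
        // Demote publish failures to INFO: the master may have changed before the
        // cluster state task ran, and TransportMasterNodeAction retries those.
        static void logTaskFailure(Logger logger, String description, Exception e) {
            if (MasterService.isPublishFailureException(e)) {
                logger.info(() -> "failed to " + description + ", attempting retry", e);
            } else {
                // Anything else is unexpected and still warrants operator attention.
                logger.error(() -> "failed to " + description, e);
            }
        }
    }

The change below applies exactly this branching inside PutShutdownNodeTask.onFailure.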
--- .../xpack/shutdown/TransportPutShutdownNodeAction.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index 750bb9227cff6..b68a29604be22 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; import org.elasticsearch.common.inject.Inject; @@ -82,7 +83,11 @@ private static boolean putShutdownNodeState( record PutShutdownNodeTask(Request request, ActionListener listener) implements ClusterStateTaskListener { @Override public void onFailure(Exception e) { - logger.error(() -> "failed to put shutdown for node [" + request.getNodeId() + "]", e); + if (MasterService.isPublishFailureException(e)) { + logger.info(() -> "failed to put shutdown for node [" + request.getNodeId() + "], attempting retry", e); + } else { + logger.error(() -> "failed to put shutdown for node [" + request.getNodeId() + "]", e); + } listener.onFailure(e); } } From 225edaf6076770385b4d091af89a546020ec5c79 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 16 Apr 2024 10:51:28 -0400 Subject: [PATCH 052/130] Revert "[ES|QL] Moving argument compatibility checking for Equals (#105217)" (#107537) * Revert "[ES|QL] Moving argument compatibility checking for Equals (#105217)" This reverts commit af8e4bf26d15af49824cdb94fcd149f8a984b48e. 
* Update docs/changelog/107537.yaml --- docs/changelog/107537.yaml | 5 + .../src/main/resources/conditional.csv-spec | 7 - .../predicate/operator/comparison/Equals.java | 47 ++--- .../comparison/EsqlBinaryComparison.java | 164 --------------- .../operator/comparison/GreaterThan.java | 33 +-- .../comparison/GreaterThanOrEqual.java | 34 +--- .../operator/comparison/LessThan.java | 36 ++-- .../operator/comparison/LessThanOrEqual.java | 31 +-- .../operator/comparison/NotEquals.java | 82 +++----- .../DateTimeArithmeticOperation.java | 8 +- .../arithmetic/EsqlArithmeticOperation.java | 23 +-- .../function/AbstractFunctionTestCase.java | 49 ----- .../expression/function/TestCaseSupplier.java | 84 ++------ .../operator/arithmetic/AddTests.java | 8 +- .../operator/comparison/EqualsTests.java | 188 +++--------------- .../comparison/GreaterThanOrEqualTests.java | 21 +- .../operator/comparison/GreaterThanTests.java | 21 +- .../comparison/LessThanOrEqualTests.java | 20 +- .../operator/comparison/LessThanTests.java | 20 +- .../operator/comparison/NotEqualsTests.java | 187 +++-------------- .../esql/optimizer/OptimizerRulesTests.java | 2 +- 21 files changed, 245 insertions(+), 825 deletions(-) create mode 100644 docs/changelog/107537.yaml delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java diff --git a/docs/changelog/107537.yaml b/docs/changelog/107537.yaml new file mode 100644 index 0000000000000..d6d502b394c3b --- /dev/null +++ b/docs/changelog/107537.yaml @@ -0,0 +1,5 @@ +pr: 107537 +summary: "Revert \"[ES|QL] Moving argument compatibility checking for Equals\"" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64a8c1d9da316..f574722f691e5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -156,9 +156,6 @@ nullOnMultivaluesComparisonOperation required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; -warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value - a:integer | b:integer | same:boolean 5 | [1, 2] | null @@ -169,8 +166,6 @@ notNullOnMultivaluesComparisonOperation required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; -warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value a:integer | b:integer | same:boolean ; @@ -180,8 +175,6 @@ notNullOnMultivaluesComparisonOperationWithPartialMatch required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; -warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value a:integer | b:integer | same:boolean ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java index 62eec13af008a..9fb899b8e36df 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java @@ -8,48 +8,33 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; - -public class Equals extends EsqlBinaryComparison implements Negatable { - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.BOOLEAN, EqualsBoolsEvaluator.Factory::new), - Map.entry(DataTypes.INTEGER, EqualsIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, EqualsDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, EqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, EqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, EqualsLongsEvaluator.Factory::new), - Map.entry(EsqlDataTypes.GEO_POINT, EqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.CARTESIAN_POINT, EqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.GEO_SHAPE, EqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, EqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, EqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, EqualsKeywordsEvaluator.Factory::new) - ); +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; + +public class Equals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals { public Equals(Source source, Expression left, Expression right) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, evaluatorMap); + super(source, left, right); } public Equals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, zoneId, evaluatorMap); + super(source, left, right, zoneId); + } + + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return EsqlTypeResolutions.isExact(e, 
sourceText(), DEFAULT); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, Equals::new, left(), right(), zoneId()); } @@ -63,11 +48,6 @@ public Equals swapLeftAndRight() { return new Equals(source(), right(), left(), zoneId()); } - @Override - public BinaryComparison reverse() { - return this; - } - @Override public BinaryComparison negate() { return new NotEquals(source(), left(), right(), zoneId()); @@ -102,5 +82,4 @@ static boolean processBools(boolean lhs, boolean rhs) { static boolean processGeometries(BytesRef lhs, BytesRef rhs) { return lhs.equals(rhs); } - } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java deleted file mode 100644 index 58a808893c4c6..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; - -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; - -import java.time.ZoneId; -import java.util.Map; -import java.util.function.Function; - -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; -import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; - -public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { - - private final Map evaluatorMap; - - protected EsqlBinaryComparison( - Source source, - Expression left, - Expression right, - /* TODO: BinaryComparisonOperator is an enum with a bunch of functionality we don't really want. We should extract an interface and - create a symbol only version like we did for BinaryArithmeticOperation. Ideally, they could be the same class. 
- */ - BinaryComparisonProcessor.BinaryComparisonOperation operation, - Map evaluatorMap - ) { - this(source, left, right, operation, null, evaluatorMap); - } - - protected EsqlBinaryComparison( - Source source, - Expression left, - Expression right, - BinaryComparisonProcessor.BinaryComparisonOperation operation, - // TODO: We are definitely not doing the right thing with this zoneId - ZoneId zoneId, - Map evaluatorMap - ) { - super(source, left, right, operation, zoneId); - this.evaluatorMap = evaluatorMap; - } - - @Override - public EvalOperator.ExpressionEvaluator.Factory toEvaluator( - Function toEvaluator - ) { - // Our type is always boolean, so figure out the evaluator type from the inputs - DataType commonType = EsqlDataTypeRegistry.INSTANCE.commonType(left().dataType(), right().dataType()); - EvalOperator.ExpressionEvaluator.Factory lhs; - EvalOperator.ExpressionEvaluator.Factory rhs; - - if (commonType.isNumeric()) { - lhs = Cast.cast(source(), left().dataType(), commonType, toEvaluator.apply(left())); - rhs = Cast.cast(source(), right().dataType(), commonType, toEvaluator.apply(right())); - } else { - lhs = toEvaluator.apply(left()); - rhs = toEvaluator.apply(right()); - } - - if (evaluatorMap.containsKey(commonType) == false) { - throw new EsqlIllegalArgumentException("Unsupported type " + left().dataType()); - } - return evaluatorMap.get(commonType).apply(source(), lhs, rhs); - } - - @Override - public Boolean fold() { - return (Boolean) EvaluatorMapper.super.fold(); - } - - @Override - protected TypeResolution resolveType() { - TypeResolution typeResolution = super.resolveType(); - if (typeResolution.unresolved()) { - return typeResolution; - } - - return checkCompatibility(); - } - - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return TypeResolutions.isType( - e, - evaluatorMap::containsKey, - sourceText(), - paramOrdinal, - evaluatorMap.keySet().stream().map(DataType::typeName).toArray(String[]::new) - ); - } - - /** - * Check if the two input types are compatible for this operation - * - * @return TypeResolution.TYPE_RESOLVED iff the types are compatible. Otherwise, an appropriate type resolution error. - */ - protected TypeResolution checkCompatibility() { - DataType leftType = left().dataType(); - DataType rightType = right().dataType(); - - // Unsigned long is only interoperable with other unsigned longs - if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataTypes.NULL))) - || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataTypes.NULL)))) { - return new TypeResolution(formatIncompatibleTypesMessage()); - } - - if ((leftType.isNumeric() && rightType.isNumeric()) - || (DataTypes.isString(leftType) && DataTypes.isString(rightType)) - || leftType.equals(rightType) - || DataTypes.isNull(leftType) - || DataTypes.isNull(rightType)) { - return TypeResolution.TYPE_RESOLVED; - } - return new TypeResolution(formatIncompatibleTypesMessage()); - } - - public String formatIncompatibleTypesMessage() { - if (left().dataType().equals(UNSIGNED_LONG)) { - return format( - null, - "first argument of [{}] is [unsigned_long] and second is [{}]. " - + "[unsigned_long] can only be operated on together with another [unsigned_long]", - sourceText(), - right().dataType().typeName() - ); - } - if (right().dataType().equals(UNSIGNED_LONG)) { - return format( - null, - "first argument of [{}] is [{}] and second is [unsigned_long]. 
" - + "[unsigned_long] can only be operated on together with another [unsigned_long]", - sourceText(), - left().dataType().typeName() - ); - } - return format( - null, - "first argument of [{}] is [{}] so second argument must also be [{}] but was [{}]", - sourceText(), - left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), - left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), - right().dataType().typeName() - ); - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java index 3eca0e858acbf..5683a9d0d7e85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java @@ -8,42 +8,29 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; -public class GreaterThan extends EsqlBinaryComparison implements Negatable { - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, GreaterThanIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, GreaterThanDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, GreaterThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, GreaterThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, GreaterThanKeywordsEvaluator.Factory::new) - ); +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - public GreaterThan(Source source, Expression left, Expression right) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GT, evaluatorMap); +public class GreaterThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan { + public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, zoneId); } - public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GT, zoneId, evaluatorMap); + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return 
EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, GreaterThan::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java index f99a85420870b..ebb29998fb995 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java @@ -8,42 +8,30 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; -public class GreaterThanOrEqual extends EsqlBinaryComparison implements Negatable { - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, GreaterThanOrEqualIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, GreaterThanOrEqualDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, GreaterThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, GreaterThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, GreaterThanOrEqualKeywordsEvaluator.Factory::new) - ); +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - public GreaterThanOrEqual(Source source, Expression left, Expression right) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, evaluatorMap); - } +public class GreaterThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual { public GreaterThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, zoneId, evaluatorMap); + super(source, left, right, zoneId); + } + + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, GreaterThanOrEqual::new, left(), right(), zoneId()); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java index 6b82df1d67da6..12f54270b65dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java @@ -8,44 +8,38 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; -public class LessThan extends EsqlBinaryComparison implements Negatable { +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, LessThanIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, LessThanDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, LessThanLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, LessThanKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, LessThanKeywordsEvaluator.Factory::new) - ); +public class LessThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan { public LessThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LT, zoneId, evaluatorMap); + super(source, left, right, zoneId); } @Override - protected NodeInfo info() { + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + } + + @Override + protected NodeInfo info() { return NodeInfo.create(this, LessThan::new, left(), right(), zoneId()); } @Override - protected LessThan replaceChildren(Expression newLeft, Expression newRight) { + protected org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan replaceChildren( + Expression newLeft, + Expression newRight + ) { return new LessThan(source(), newLeft, newRight, zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java index 
ac6a92aaf097b..e75733a9e2340 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java @@ -8,38 +8,29 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; -public class LessThanOrEqual extends EsqlBinaryComparison implements Negatable { - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.INTEGER, LessThanOrEqualIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, LessThanOrEqualDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, LessThanOrEqualLongsEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, LessThanOrEqualKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, LessThanOrEqualKeywordsEvaluator.Factory::new) - ); +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class LessThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual { public LessThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LTE, zoneId, evaluatorMap); + super(source, left, right, zoneId); } @Override - protected NodeInfo info() { + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + } + + @Override + protected NodeInfo info() { return NodeInfo.create(this, LessThanOrEqual::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java index 9c931ec7433eb..6fbed572cdc01 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java @@ -8,44 +8,45 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.Negatable; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; -import java.util.Map; -public class NotEquals extends EsqlBinaryComparison implements Negatable { - private static final Map evaluatorMap = Map.ofEntries( - Map.entry(DataTypes.BOOLEAN, NotEqualsBoolsEvaluator.Factory::new), - Map.entry(DataTypes.INTEGER, NotEqualsIntsEvaluator.Factory::new), - Map.entry(DataTypes.DOUBLE, NotEqualsDoublesEvaluator.Factory::new), - Map.entry(DataTypes.LONG, NotEqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.UNSIGNED_LONG, NotEqualsLongsEvaluator.Factory::new), - Map.entry(DataTypes.DATETIME, NotEqualsLongsEvaluator.Factory::new), - Map.entry(EsqlDataTypes.GEO_POINT, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.CARTESIAN_POINT, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.GEO_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), - Map.entry(DataTypes.KEYWORD, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.TEXT, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.VERSION, NotEqualsKeywordsEvaluator.Factory::new), - Map.entry(DataTypes.IP, NotEqualsKeywordsEvaluator.Factory::new) - ); +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - public NotEquals(Source source, Expression left, Expression right) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, evaluatorMap); +public class NotEquals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals { + public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, zoneId); } - public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, zoneId, evaluatorMap); + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); + } + + @Override + protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { + return new NotEquals(source(), newLeft, newRight, zoneId()); + } + + @Override + public NotEquals swapLeftAndRight() { + return new NotEquals(source(), right(), left(), zoneId()); + } + + @Override + public BinaryComparison negate() { + return new Equals(source(), left(), right(), zoneId()); } @Evaluator(extraName = "Ints") @@ -77,29 +78,4 @@ static boolean processBools(boolean lhs, boolean rhs) { static boolean 
processGeometries(BytesRef lhs, BytesRef rhs) { return false == lhs.equals(rhs); } - - @Override - public BinaryComparison reverse() { - return this; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); - } - - @Override - protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { - return new NotEquals(source(), newLeft, newRight, zoneId()); - } - - @Override - public NotEquals swapLeftAndRight() { - return new NotEquals(source(), right(), left(), zoneId()); - } - - @Override - public BinaryComparison negate() { - return new Equals(source(), left(), right(), zoneId()); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index a45707a0197d5..0f550862ed1fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -43,10 +43,10 @@ interface DatetimeArithmeticEvaluator { Expression left, Expression right, OperationSymbol op, - BinaryEvaluator ints, - BinaryEvaluator longs, - BinaryEvaluator ulongs, - BinaryEvaluator doubles, + ArithmeticEvaluator ints, + ArithmeticEvaluator longs, + ArithmeticEvaluator ulongs, + ArithmeticEvaluator doubles, DatetimeArithmeticEvaluator datetimes ) { super(source, left, right, op, ints, longs, ulongs, doubles); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index ba283bc4d877b..22f5798e5b1c4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -71,15 +71,14 @@ public String symbol() { } /** Arithmetic (quad) function. 
*/ - @FunctionalInterface - public interface BinaryEvaluator { + interface ArithmeticEvaluator { ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory lhs, ExpressionEvaluator.Factory rhs); } - private final BinaryEvaluator ints; - private final BinaryEvaluator longs; - private final BinaryEvaluator ulongs; - private final BinaryEvaluator doubles; + private final ArithmeticEvaluator ints; + private final ArithmeticEvaluator longs; + private final ArithmeticEvaluator ulongs; + private final ArithmeticEvaluator doubles; private DataType dataType; @@ -88,10 +87,10 @@ public interface BinaryEvaluator { Expression left, Expression right, OperationSymbol op, - BinaryEvaluator ints, - BinaryEvaluator longs, - BinaryEvaluator ulongs, - BinaryEvaluator doubles + ArithmeticEvaluator ints, + ArithmeticEvaluator longs, + ArithmeticEvaluator ulongs, + ArithmeticEvaluator doubles ) { super(source, left, right, op); this.ints = ints; @@ -140,7 +139,7 @@ protected TypeResolution checkCompatibility() { return TypeResolution.TYPE_RESOLVED; } - public static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { + static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { return format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol, leftType.typeName(), rightType.typeName()); } @@ -153,7 +152,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function errorsForCasesWithoutExamples( return suppliers; } - public static String errorMessageStringForBinaryOperators( - boolean includeOrdinal, - List> validPerPosition, - List types - ) { - try { - return typeErrorMessage(includeOrdinal, validPerPosition, types); - } catch (IllegalStateException e) { - // This means all the positional args were okay, so the expected error is from the combination - if (types.get(0).equals(DataTypes.UNSIGNED_LONG)) { - return "first argument of [] is [unsigned_long] and second is [" - + types.get(1).typeName() - + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; - - } - if (types.get(1).equals(DataTypes.UNSIGNED_LONG)) { - return "first argument of [] is [" - + types.get(0).typeName() - + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; - } - return "first argument of [] is [" - + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) - + "] so second argument must also be [" - + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) - + "] but was [" - + types.get(1).typeName() - + "]"; - - } - } - /** * Adds test cases containing unsupported parameter types that immediately fail. 
*/ @@ -962,24 +931,6 @@ protected static String typeErrorMessage(boolean includeOrdinal, List types) { return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", ")); } - public static List stringCases( - BinaryOperator expected, - BiFunction evaluatorToString, - List warnings, - DataType expectedType - ) { - List lhsSuppliers = new ArrayList<>(); - List rhsSuppliers = new ArrayList<>(); - List suppliers = new ArrayList<>(); - for (DataType type : AbstractConvertFunction.STRING_TYPES) { - lhsSuppliers.addAll(stringCases(type)); - rhsSuppliers.addAll(stringCases(type)); - casesCrossProduct(expected, lhsSuppliers, rhsSuppliers, evaluatorToString, warnings, suppliers, expectedType, true); - } - return suppliers; - } - @Override public TestCase get() { TestCase supplied = supplier.get(); @@ -275,14 +258,14 @@ public static List castToDoubleSuppliersFromRange(Double Min, return suppliers; } - public record NumericTypeTestConfig(Number min, Number max, BiFunction expected, String evaluatorName) {} + public record NumericTypeTestConfig(Number min, Number max, BinaryOperator expected, String evaluatorName) {} - public record NumericTypeTestConfigs( - NumericTypeTestConfig intStuff, - NumericTypeTestConfig longStuff, - NumericTypeTestConfig doubleStuff + public record NumericTypeTestConfigs( + NumericTypeTestConfig intStuff, + NumericTypeTestConfig longStuff, + NumericTypeTestConfig doubleStuff ) { - public NumericTypeTestConfig get(DataType type) { + public NumericTypeTestConfig get(DataType type) { if (type == DataTypes.INTEGER) { return intStuff; } @@ -329,8 +312,8 @@ public static List getSuppliersForNumericType(DataType type, throw new IllegalArgumentException("bogus numeric type [" + type + "]"); } - public static List forBinaryComparisonWithWidening( - NumericTypeTestConfigs typeStuff, + public static List forBinaryWithWidening( + NumericTypeTestConfigs typeStuff, String lhsName, String rhsName, BiFunction> warnings, @@ -342,45 +325,7 @@ public static List forBinaryComparisonWithWidening( for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); - NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); - BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() - + "[" - + lhsName - + "=" - + getCastEvaluator("Attribute[channel=0]", lhs, expected) - + ", " - + rhsName - + "=" - + getCastEvaluator("Attribute[channel=1]", rhs, expected) - + "]"; - casesCrossProduct( - (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), - getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), - getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), - evaluatorToString, - warnings, - suppliers, - DataTypes.BOOLEAN, - true - ); - } - } - return suppliers; - } - - public static List forBinaryWithWidening( - NumericTypeTestConfigs typeStuff, - String lhsName, - String rhsName, - List warnings - ) { - List suppliers = new ArrayList<>(); - List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); - - for (DataType lhsType : numericTypes) { - for (DataType rhsType : numericTypes) { - DataType expected = widen(lhsType, rhsType); - NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + "[" + lhsName @@ -940,7 +885,7 @@ public static 
List doubleCases(double min, double max, boolea return cases; } - public static List booleanCases() { + private static List booleanCases() { return List.of( new TypedDataSupplier("", () -> true, DataTypes.BOOLEAN), new TypedDataSupplier("", () -> false, DataTypes.BOOLEAN) @@ -1322,14 +1267,9 @@ public Matcher evaluatorToString() { * exists because we can't generate random values from the test parameter generation functions, and instead need to return * suppliers which generate the random values at test execution time. */ - public record TypedDataSupplier(String name, Supplier supplier, DataType type, boolean forceLiteral) { - - public TypedDataSupplier(String name, Supplier supplier, DataType type) { - this(name, supplier, type, false); - } - + public record TypedDataSupplier(String name, Supplier supplier, DataType type) { public TypedData get() { - return new TypedData(supplier.get(), type, name, forceLiteral); + return new TypedData(supplier.get(), type, name); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 2596959c449db..c40d037890d53 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -43,20 +43,20 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); suppliers.addAll( TestCaseSupplier.forBinaryWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig<>( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> l.intValue() + r.intValue(), "AddIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig<>( + new TestCaseSupplier.NumericTypeTestConfig( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> l.longValue() + r.longValue(), "AddLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig<>( + new TestCaseSupplier.NumericTypeTestConfig( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> l.doubleValue() + r.doubleValue(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 6fcc4235f5b79..0a1e9bdfaf34b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -11,188 +11,52 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import 
org.elasticsearch.xpack.ql.util.NumericUtils; +import org.hamcrest.Matcher; -import java.math.BigInteger; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -public class EqualsTests extends AbstractFunctionTestCase { +import static org.hamcrest.Matchers.equalTo; + +public class EqualsTests extends AbstractBinaryComparisonTestCase { public EqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - List suppliers = new ArrayList<>(); - suppliers.addAll( - TestCaseSupplier.forBinaryComparisonWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs<>( - new TestCaseSupplier.NumericTypeTestConfig<>( - (Integer.MIN_VALUE >> 1) - 1, - (Integer.MAX_VALUE >> 1) - 1, - (l, r) -> l.intValue() == r.intValue(), - "EqualsIntsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - (Long.MIN_VALUE >> 1) - 1, - (Long.MAX_VALUE >> 1) - 1, - (l, r) -> l.longValue() == r.longValue(), - "EqualsLongsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - // NB: this has different behavior than Double::equals - (l, r) -> l.doubleValue() == r.doubleValue(), - "EqualsDoublesEvaluator" - ) + return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int == Int", () -> { + int rhs = randomInt(); + int lhs = randomInt(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") ), - "lhs", - "rhs", - List.of() - ) - ); - - // Unsigned Long cases - // TODO: These should be integrated into the type cross product above, but are currently broken - // see https://github.com/elastic/elasticsearch/issues/102935 - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsLongsEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX), - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsBoolsEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.booleanCases(), - TestCaseSupplier.booleanCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsKeywordsEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.ipCases(), - TestCaseSupplier.ipCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsKeywordsEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.versionCases(""), - TestCaseSupplier.versionCases(""), - List.of() - ) - ); - // Datetime - // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? 
- suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsLongsEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), - List.of() - ) - ); - - suppliers.addAll( - TestCaseSupplier.stringCases( - Object::equals, - (lhsType, rhsType) -> "EqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - List.of(), - DataTypes.BOOLEAN - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsGeometriesEvaluator", - "lhs", - "rhs", - Object::equals, + "EqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.BOOLEAN, - TestCaseSupplier.geoPointCases(), - TestCaseSupplier.geoPointCases(), - List.of() - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsGeometriesEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.geoShapeCases(), - TestCaseSupplier.geoShapeCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsGeometriesEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.cartesianPointCases(), - TestCaseSupplier.cartesianPointCases(), - List.of() - ) - ); + equalTo(lhs == rhs) + ); + }))); + } - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "EqualsGeometriesEvaluator", - "lhs", - "rhs", - Object::equals, - DataTypes.BOOLEAN, - TestCaseSupplier.cartesianShapeCases(), - TestCaseSupplier.cartesianShapeCases(), - List.of() - ) - ); + @Override + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(lhs.equals(rhs)); + } - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) - ); + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new Equals(source, lhs, rhs); } @Override - protected Expression build(Source source, List args) { - return new Equals(source, args.get(0), args.get(1)); + protected boolean isEquality() { + return true; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index f45dedff837c4..ad8dba7d63065 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -11,25 +11,26 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; +import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanOrEqualTests extends 
AbstractFunctionTestCase { +public class GreaterThanOrEqualTests extends AbstractBinaryComparisonTestCase { public GreaterThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int >= Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -46,7 +47,17 @@ public static Iterable parameters() { } @Override - protected Expression build(Source source, List args) { - return new GreaterThanOrEqual(source, args.get(0), args.get(1)); + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(lhs.compareTo(rhs) >= 0); + } + + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new GreaterThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); + } + + @Override + protected boolean isEquality() { + return false; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index e872af5b7c772..b631a742f7885 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -11,25 +11,26 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; +import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanTests extends AbstractFunctionTestCase { +public class GreaterThanTests extends AbstractBinaryComparisonTestCase { public GreaterThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int > Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -46,7 +47,17 @@ public static Iterable parameters() { } @Override - protected Expression build(Source source, List args) { - return new GreaterThan(source, args.get(0), args.get(1)); + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(lhs.compareTo(rhs) > 0); + } + + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new GreaterThan(source, lhs, rhs, ZoneOffset.UTC); + } + + @Override + protected boolean isEquality() { + return false; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 8bba0c4a5afb5..7864a0dda9fe3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -11,18 +11,20 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; +import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanOrEqualTests extends AbstractFunctionTestCase { +public class LessThanOrEqualTests extends AbstractBinaryComparisonTestCase { public LessThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -45,7 +47,17 @@ public static Iterable parameters() { } @Override - protected Expression build(Source source, List args) { - return new LessThanOrEqual(source, args.get(0), args.get(1), null); + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(lhs.compareTo(rhs) <= 0); + } + + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new LessThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); + } + + @Override + protected boolean isEquality() { + return false; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index ab726dc51fbe4..826e88551077d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -11,18 +11,20 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; +import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanTests extends AbstractFunctionTestCase { +public class LessThanTests extends AbstractBinaryComparisonTestCase { public LessThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -45,7 
+47,17 @@ public static Iterable parameters() { } @Override - protected Expression build(Source source, List args) { - return new LessThan(source, args.get(0), args.get(1), null); + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(lhs.compareTo(rhs) < 0); + } + + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new LessThan(source, lhs, rhs, ZoneOffset.UTC); + } + + @Override + protected boolean isEquality() { + return false; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index d6ee5806e0c9e..0d6bb32fe2488 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -11,182 +11,53 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; -import java.math.BigInteger; -import java.util.ArrayList; +import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; -public class NotEqualsTests extends AbstractFunctionTestCase { +import static org.hamcrest.Matchers.equalTo; + +public class NotEqualsTests extends AbstractBinaryComparisonTestCase { public NotEqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - List suppliers = new ArrayList<>(); - suppliers.addAll( - - TestCaseSupplier.forBinaryComparisonWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs<>( - new TestCaseSupplier.NumericTypeTestConfig<>( - (Integer.MIN_VALUE >> 1) - 1, - (Integer.MAX_VALUE >> 1) - 1, - (l, r) -> l.intValue() != r.intValue(), - "NotEqualsIntsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - (Long.MIN_VALUE >> 1) - 1, - (Long.MAX_VALUE >> 1) - 1, - (l, r) -> l.longValue() != r.longValue(), - "NotEqualsLongsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - // NB: this has different behavior than Double::equals - (l, r) -> l.doubleValue() != r.doubleValue(), - "NotEqualsDoublesEvaluator" - ) + return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int != Int", () -> { + int rhs = randomInt(); + int lhs = randomInt(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), + new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") ), - "lhs", - "rhs", - List.of() - ) - ); - // Unsigned Long cases - // TODO: These should be integrated into the type cross product above, but are currently broken - // see https://github.com/elastic/elasticsearch/issues/102935 - suppliers.addAll( - 
TestCaseSupplier.forBinaryNotCasting( - "NotEqualsLongsEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), - TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsBoolsEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.booleanCases(), - TestCaseSupplier.booleanCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsKeywordsEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.ipCases(), - TestCaseSupplier.ipCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsKeywordsEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.versionCases(""), - TestCaseSupplier.versionCases(""), - List.of() - ) - ); - // Datetime - // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsLongsEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.stringCases( - (l, r) -> false == l.equals(r), - (lhsType, rhsType) -> "NotEqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - List.of(), - DataTypes.BOOLEAN - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsGeometriesEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.geoPointCases(), - TestCaseSupplier.geoPointCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsGeometriesEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), + "NotEqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.BOOLEAN, - TestCaseSupplier.geoShapeCases(), - TestCaseSupplier.geoShapeCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsGeometriesEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.cartesianPointCases(), - TestCaseSupplier.cartesianPointCases(), - List.of() - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - "NotEqualsGeometriesEvaluator", - "lhs", - "rhs", - (l, r) -> false == l.equals(r), - DataTypes.BOOLEAN, - TestCaseSupplier.cartesianShapeCases(), - TestCaseSupplier.cartesianShapeCases(), - List.of() - ) - ); - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) - ); + equalTo(lhs != rhs) + ); + }))); + } + + @Override + protected > Matcher resultMatcher(T lhs, T rhs) { + return equalTo(false == lhs.equals(rhs)); + } + + @Override + protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { + return new NotEquals(source, lhs, rhs, ZoneOffset.UTC); } @Override - protected Expression build(Source source, List args) { - return new NotEquals(source, args.get(0), args.get(1)); + protected boolean isEquality() { + return true; } } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 28944252191be..01fcd222a5141 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -495,7 +495,7 @@ public void testPropagateEquals_VarEq2OrVarNeq5() { // a = 2 OR 3 < a < 4 OR a > 2 OR a != 2 -> TRUE public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { FieldAttribute fa = getFieldAttribute(); - Equals eq = equalsOf(fa, TWO); + org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, THREE, false, FOUR, false); GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); From 624a5b1fe52c7160f9a2cb4f892a947047ee6bdf Mon Sep 17 00:00:00 2001 From: "Mark J. Hoy" Date: Tue, 16 Apr 2024 10:52:27 -0400 Subject: [PATCH 053/130] Add Docs for Azure OpenAI Embeddings Inference (#107498) * Update docs for Azure OpenAI Embeddings inference * cleanups * update link for dot_product similarity * final cleanups --- docs/changelog/107178.yaml | 5 ++ .../inference/put-inference.asciidoc | 79 ++++++++++++++++++- .../infer-api-ingest-pipeline-widget.asciidoc | 19 ++++- .../infer-api-ingest-pipeline.asciidoc | 28 ++++++- .../infer-api-mapping-widget.asciidoc | 19 ++++- .../inference-api/infer-api-mapping.asciidoc | 43 +++++++++- .../infer-api-reindex-widget.asciidoc | 20 ++++- .../inference-api/infer-api-reindex.asciidoc | 30 ++++++- .../infer-api-requirements-widget.asciidoc | 19 ++++- .../infer-api-requirements.asciidoc | 10 ++- .../infer-api-search-widget.asciidoc | 19 ++++- .../inference-api/infer-api-search.asciidoc | 67 +++++++++++++++- .../infer-api-task-widget.asciidoc | 19 ++++- .../inference-api/infer-api-task.asciidoc | 37 ++++++++- 14 files changed, 394 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/107178.yaml diff --git a/docs/changelog/107178.yaml b/docs/changelog/107178.yaml new file mode 100644 index 0000000000000..94a91357d38e6 --- /dev/null +++ b/docs/changelog/107178.yaml @@ -0,0 +1,5 @@ +pr: 107178 +summary: "Add support for Azure OpenAI embeddings to inference service" +area: Machine Learning +type: feature +issues: [ ] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 332752e52f068..1f73cd08401ee 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -7,8 +7,8 @@ experimental[] Creates an {infer} endpoint to perform an {infer} task. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, or -Hugging Face. For built-in models and models uploaded though +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure +OpenAI, or Hugging Face. For built-in models and models uploaded through Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <<ml-df-trained-models-apis>>.
@@ -42,6 +42,7 @@ The following services are available through the {infer} API: * ELSER * Hugging Face * OpenAI +* Azure OpenAI * Elasticsearch (for built-in models and models uploaded through Eland) @@ -78,6 +79,7 @@ Cohere service. service. * `openai`: specify the `completion` or `text_embedding` task type to use the OpenAI service. +* `azureopenai`: specify the `text_embedding` task type to use the Azure OpenAI service. * `elasticsearch`: specify the `text_embedding` task type to use the E5 built-in model or text embedding models uploaded by Eland. @@ -187,6 +189,41 @@ https://platform.openai.com/account/organization[**Settings** > **Organizations**] (Optional, string) The URL endpoint to use for the requests. Can be changed for testing purposes. Defaults to `https://api.openai.com/v1/embeddings`. + +===== ++ +.`service_settings` for the `azureopenai` service +[%collapsible%closed] +===== + +`api_key` or `entra_id`::: +(Required, string) +You must provide _either_ an API key or an Entra ID. +If you do not provide either, or provide both, you will receive an error when trying to create your model. +See the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#authentication[Azure OpenAI Authentication documentation] for more details on these authentication types. + +IMPORTANT: You need to provide the API key or Entra ID only once, during the {infer} model creation. +The <<get-inference-api>> does not retrieve your authentication credentials. +After creating the {infer} model, you cannot change the associated API key or Entra ID. +If you want to use a different API key or Entra ID, delete the {infer} model and recreate it with the same name and the updated API key. +You _must_ have either an `api_key` or an `entra_id` defined. +If neither is present, an error will occur. + +`resource_name`::: +(Required, string) +The name of your Azure OpenAI resource. +You can find this in the https://portal.azure.com/#view/HubsExtension/BrowseAll[list of resources] in the Azure Portal for your subscription. + +`deployment_id`::: +(Required, string) +The deployment name of your deployed models. +Your Azure OpenAI deployments can be found through the https://oai.azure.com/[Azure OpenAI Studio] portal that is linked to your subscription. + +`api_version`::: +(Required, string) +The Azure API version ID to use. +We recommend using the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#embeddings[latest supported non-preview version]. + ===== + .`service_settings` for the `elasticsearch` service @@ -266,8 +303,17 @@ maximum token length. Defaults to `END`. Valid values are: `user`::: (optional, string) -For `openai` service only. Specifies the user issuing the request, which can be -used for abuse detection. +For `openai` and `azureopenai` services only. Specifies the user issuing the +request, which can be used for abuse detection. + +===== ++ +.`task_settings` for the `completion` task type +[%collapsible%closed] +===== +`user`::: +(optional, string) +For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. ===== @@ -491,3 +537,28 @@ PUT _inference/completion/openai-completion } ------------------------------------------------------------ // TEST[skip:TBD] + +[discrete] +[[inference-example-azureopenai]] +===== Azure OpenAI service + +The following example shows how to create an {infer} endpoint called +`azure_openai_embeddings` to perform a `text_embedding` task type.
+Note that we do not specify a model here, as it is defined already via our Azure OpenAI deployment. + +The list of embeddings models that you can choose from in your deployment can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#embeddings[Azure models documentation]. + +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/azure_openai_embeddings +{ + "service": "azureopenai", + "service_settings": { + "api_key": "", + "resource_name": "", + "deployment_id": "", + "api_version": "2024-02-01" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc index 069dcb61f81b0..4baada19998e8 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-ingest-openai"> OpenAI +
+            [Azure OpenAI tab button for the ingest pipeline widget; raw HTML markup lost in extraction]
    -++++ \ No newline at end of file +++++ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc index 869e41a4ca7d1..f50b866e8a5b1 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc @@ -85,4 +85,30 @@ PUT _ingest/pipeline/openai_embeddings <2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. -// end::openai[] \ No newline at end of file +// end::openai[] + +// tag::azure-openai[] + +[source,console] +-------------------------------------------------- +PUT _ingest/pipeline/azure_openai_embeddings +{ + "processors": [ + { + "inference": { + "model_id": "azure_openai_embeddings", <1> + "input_output": { <2> + "input_field": "content", + "output_field": "content_embedding" + } + } + } + ] +} +-------------------------------------------------- +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. +<2> Configuration object that defines the `input_field` for the {infer} process +and the `output_field` that will contain the {infer} results. + +// end::azure-openai[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc index 9d94ce880988a..e35ee712b8f56 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-mapping-openai"> OpenAI +
+            [Azure OpenAI tab button for the mapping widget; raw HTML markup lost in extraction]
    -++++ \ No newline at end of file +++++ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc index 6803b73c06879..037c5957b01ff 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-mapping.asciidoc @@ -84,7 +84,7 @@ PUT openai-embeddings } } -------------------------------------------------- -<1> The name of the field to contain the generated tokens. It must be refrenced +<1> The name of the field to contain the generated tokens. It must be referenced in the {infer} pipeline configuration in the next step. <2> The field to contain the tokens is a `dense_vector` field. <3> The output dimensions of the model. Find this value in the @@ -99,4 +99,43 @@ In this example, the name of the field is `content`. It must be referenced in the {infer} pipeline configuration in the next step. <6> The field type which is text in this example. -// end::openai[] \ No newline at end of file +// end::openai[] + +// tag::azure-openai[] + +[source,console] +-------------------------------------------------- +PUT azure-openai-embeddings +{ + "mappings": { + "properties": { + "content_embedding": { <1> + "type": "dense_vector", <2> + "dims": 1536, <3> + "element_type": "float", + "similarity": "dot_product" <4> + }, + "content": { <5> + "type": "text" <6> + } + } + } +} +-------------------------------------------------- +<1> The name of the field to contain the generated tokens. It must be referenced +in the {infer} pipeline configuration in the next step. +<2> The field to contain the tokens is a `dense_vector` field. +<3> The output dimensions of the model. Find this value in the +https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#embeddings-models[Azure OpenAI documentation] +of the model you use. +<4> For Azure OpenAI embeddings, the `dot_product` function should be used to +calculate similarity as Azure OpenAI embeddings are normalised to unit length. +See the +https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/understand-embeddings[Azure OpenAI embeddings] +documentation for more information on the model specifications. +<5> The name of the field from which to create the dense vector representation. +In this example, the name of the field is `content`. It must be referenced in +the {infer} pipeline configuration in the next step. +<6> The field type which is text in this example. + +// end::azure-openai[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc index 9a78868e44da1..58dac586ba234 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-reindex-openai"> OpenAI +
+            [Azure OpenAI tab button for the reindex widget; raw HTML markup lost in extraction]
    + -++++ \ No newline at end of file +++++ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc index 118f7f0460924..e97a7187415f1 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-reindex.asciidoc @@ -75,4 +75,32 @@ https://platform.openai.com/account/limits[rate limit of your OpenAI account] may affect the throughput of the reindexing process. If this happens, change `size` to `3` or a similar value in magnitude. -// end::openai[] \ No newline at end of file +// end::openai[] + +// tag::azure-openai[] + +[source,console] +---- +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 50 <1> + }, + "dest": { + "index": "azure-openai-embeddings", + "pipeline": "azure_openai_embeddings" + } +} +---- +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +NOTE: The +https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits#quotas-and-limits-reference[rate limit of your Azure OpenAI account] +may affect the throughput of the reindexing process. If this happens, change +`size` to `3` or a similar value in magnitude. + +// end::azure-openai[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc index c55056cd1a3d2..781ddb43cb352 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-requirements-openai"> OpenAI +
+            [Azure OpenAI tab button for the requirements widget; raw HTML markup lost in extraction]
    -++++ \ No newline at end of file +++++ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc index 21a9d2111ef74..e67a905e1e97d 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc @@ -17,4 +17,12 @@ API with the HuggingFace service. An https://openai.com/[OpenAI account] is required to use the {infer} API with the OpenAI service. -// end::openai[] \ No newline at end of file +// end::openai[] + +// tag::azure-openai[] +* An https://azure.microsoft.com/free/cognitive-services?azure-portal=true[Azure subscription] +* Access granted to Azure OpenAI in the desired Azure subscription. +You can apply for access to Azure OpenAI by completing the form at https://aka.ms/oai/access. +* An embedding model deployed in https://oai.azure.com/[Azure OpenAI Studio]. + +// end::azure-openai[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc index e945146e22ca4..d3b7ba96bb199 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-search-openai"> OpenAI +
+            [Azure OpenAI tab button for the search widget; raw HTML markup lost in extraction]
-++++
\ No newline at end of file
+++++
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc
index 1aa3b6f2f2ae8..04515d0040eaf 100644
--- a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc
+++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc
@@ -209,4 +209,69 @@ query from the `openai-embeddings` index sorted by their proximity to the query:
 --------------------------------------------------
 // NOTCONSOLE

-// end::openai[]
\ No newline at end of file
+// end::openai[]
+
+// tag::azure-openai[]
+
+[source,console]
+--------------------------------------------------
+GET azure-openai-embeddings/_search
+{
+  "knn": {
+    "field": "content_embedding",
+    "query_vector_builder": {
+      "text_embedding": {
+        "model_id": "azure_openai_embeddings",
+        "model_text": "Calculate fuel cost"
+      }
+    },
+    "k": 10,
+    "num_candidates": 100
+  },
+  "_source": [
+    "id",
+    "content"
+  ]
+}
+--------------------------------------------------
+// TEST[skip:TBD]
+
+As a result, you receive the top 10 documents that are closest in meaning to the
+query from the `azure-openai-embeddings` index sorted by their proximity to the query:
+
+[source,console-result]
+--------------------------------------------------
+"hits": [
+  {
+    "_index": "azure-openai-embeddings",
+    "_id": "DDd5OowBHxQKHyc3TDSC",
+    "_score": 0.83704096,
+    "_source": {
+      "id": 862114,
+      "body": "How to calculate fuel cost for a road trip. By Tara Baukus Mello • Bankrate.com. Dear Driving for Dollars, My family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost.It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes.y family is considering taking a long road trip to finish off the end of the summer, but I'm a little worried about gas prices and our overall fuel cost. It doesn't seem easy to calculate since we'll be traveling through many states and we are considering several routes."
+    }
+  },
+  {
+    "_index": "azure-openai-embeddings",
+    "_id": "ajd5OowBHxQKHyc3TDSC",
+    "_score": 0.8345704,
+    "_source": {
+      "id": 820622,
+      "body": "Home Heating Calculator. Typically, approximately 50% of the energy consumed in a home annually is for space heating. When deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important.This calculator can help you estimate the cost of fuel for different heating appliances.hen deciding on a heating system, many factors will come into play: cost of fuel, installation cost, convenience and life style are all important. This calculator can help you estimate the cost of fuel for different heating appliances."
+    }
+  },
+  {
+    "_index": "azure-openai-embeddings",
+    "_id": "Djd5OowBHxQKHyc3TDSC",
+    "_score": 0.8327426,
+    "_source": {
+      "id": 8202683,
+      "body": "Fuel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel. A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel.If you are paying $4 per gallon, the trip would cost you $200.Most boats have much larger gas tanks than cars.uel is another important cost. This cost will depend on your boat, how far you travel, and how fast you travel.
A 33-foot sailboat traveling at 7 knots should be able to travel 300 miles on 50 gallons of diesel fuel." + } + }, + (...) + ] +-------------------------------------------------- +// NOTCONSOLE + +// end::azure-openai[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc index ebc8d093d01a0..aac26913f955e 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task-widget.asciidoc @@ -19,6 +19,12 @@ id="infer-api-task-openai"> OpenAI +
+            [Azure OpenAI tab button for the task widget; raw HTML markup lost in extraction]
-++++
\ No newline at end of file
+++++
diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc
index efbf1f8f25f56..07d5177b60344 100644
--- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc
+++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc
@@ -13,7 +13,7 @@ PUT _inference/text_embedding/cohere_embeddings <1>
 }
 ------------------------------------------------------------
 // TEST[skip:TBD]
-<1> The task type is `text_embedding` in the path and the `inference_id` which
+<1> The task type is `text_embedding` in the path and the `inference_id` which
 is the unique identifier of the {infer} endpoint is `cohere_embeddings`.
 <2> The API key of your Cohere account. You can find your API keys in your
 Cohere dashboard under the
@@ -54,7 +54,7 @@ PUT _inference/text_embedding/hugging_face_embeddings <1>
 }
 ------------------------------------------------------------
 // TEST[skip:TBD]
-<1> The task type is `text_embedding` in the path and the `inference_id` which
+<1> The task type is `text_embedding` in the path and the `inference_id` which
 is the unique identifier of the {infer} endpoint is `hugging_face_embeddings`.
 <2> A valid HuggingFace access token. You can find on the
 https://huggingface.co/settings/tokens[settings page of your account].
@@ -77,7 +77,7 @@ PUT _inference/text_embedding/openai_embeddings <1>
 }
 ------------------------------------------------------------
 // TEST[skip:TBD]
-<1> The task type is `text_embedding` in the path and the `inference_id` which
+<1> The task type is `text_embedding` in the path and the `inference_id` which
 is the unique identifier of the {infer} endpoint is `openai_embeddings`.
 <2> The API key of your OpenAI account. You can find your OpenAI API keys in
 your OpenAI account under the
@@ -93,4 +93,33 @@ NOTE: When using this model the recommended similarity measure to use in the
 embeddings are normalized to unit length in which case the `dot_product` and
 the `cosine` measures are equivalent.

-// end::openai[]
\ No newline at end of file
+// end::openai[]
+
+// tag::azure-openai[]
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/azure_openai_embeddings <1>
+{
+    "service": "azureopenai",
+    "service_settings": {
+        "api_key": "<api_key>", <2>
+        "resource_name": "<resource_name>", <3>
+        "deployment_id": "<deployment_id>", <4>
+        "api_version": "2024-02-01"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> The task type is `text_embedding` in the path and the `inference_id` which is the unique identifier of the {infer} endpoint is `azure_openai_embeddings`.
+<2> The API key for accessing your Azure OpenAI services.
+Alternatively, you can provide an `entra_id` instead of an `api_key` here.
+The <> does not return this information.
+<3> The name of your Azure resource.
+<4> The id of your deployed model.
+
+NOTE: When using this model, the recommended similarity measure to use in the
+`dense_vector` field mapping is `dot_product`.
+In the case of Azure OpenAI models, the embeddings are normalized to unit length in which case the `dot_product` and the `cosine` measures are equivalent.
+
+// end::azure-openai[]

From 1e4d4da483c441455a011f0d1b2ccfe398ceef41 Mon Sep 17 00:00:00 2001
From: Alexander Spies
Date: Tue, 16 Apr 2024 17:06:09 +0200
Subject: [PATCH 054/130] ESQL: Make esql version required in REST requests
 (#107433)

* Enable corresponding validation in EsqlQueryRequest.
* Add the ESQL version to requests to /_query in integration tests. * In mixed cluster tests for versions prior to 8.13.3, impersonate an 8.13 client and do not send any version. --------- Co-authored-by: Nik Everett --- .../xpack/esql/heap_attack/HeapAttackIT.java | 42 ++++++---- .../ImpersonateOfficialClientTestClient.java | 54 +++++++++++++ .../xpack/esql/EsqlAsyncSecurityIT.java | 1 + .../xpack/esql/EsqlSecurityIT.java | 2 + .../esql/qa/server/mixed-cluster/build.gradle | 9 ++- .../xpack/esql/qa/mixed/EsqlClientYamlIT.java | 76 +++++++++++++++++++ .../xpack/esql/ccq/MultiClustersIT.java | 5 +- .../xpack/esql/qa/single_node/RestEsqlIT.java | 8 +- .../esql/qa/single_node/TSDBRestEsqlIT.java | 5 +- .../single_node/AbstractEsqlClientYamlIT.java | 44 ++++++++++- .../qa/single_node/EsqlClientYamlAsyncIT.java | 44 +---------- .../EsqlClientYamlAsyncSubmitAndFetchIT.java | 2 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 20 +++-- .../esql/qa/rest/FieldExtractorTestCase.java | 68 ++++++----------- .../esql/qa/rest/RestEnrichTestCase.java | 29 +++---- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 70 ++++++++++------- .../rest/generative/GenerativeRestTest.java | 5 +- .../xpack/esql/EsqlTestUtils.java | 7 +- .../action/AbstractEsqlIntegTestCase.java | 29 ++++++- .../action/CrossClustersCancellationIT.java | 2 +- .../esql/action/CrossClustersEnrichIT.java | 2 +- .../esql/action/CrossClustersQueryIT.java | 10 +-- .../xpack/esql/action/EnrichIT.java | 2 +- .../esql/action/EsqlActionBreakerIT.java | 2 +- .../xpack/esql/action/EsqlActionTaskIT.java | 2 +- .../xpack/esql/action/EsqlAsyncActionIT.java | 7 +- .../xpack/esql/action/TimeBasedIndicesIT.java | 14 ++-- .../xpack/esql/action/WarningsIT.java | 2 +- .../xpack/esql/action/EsqlQueryRequest.java | 4 +- .../esql/action/EsqlQueryRequestBuilder.java | 4 + .../esql/action/EsqlQueryRequestTests.java | 1 - .../RemoteClusterSecurityEsqlIT.java | 3 + .../rest-api-spec/test/esql/100_bug_fix.yml | 8 ++ .../rest-api-spec/test/esql/10_basic.yml | 13 ++++ .../rest-api-spec/test/esql/110_all_null.yml | 4 + .../test/esql/110_insensitive_equals.yml | 16 ++++ .../rest-api-spec/test/esql/120_profile.yml | 1 + .../rest-api-spec/test/esql/130_spatial.yml | 17 +++++ .../rest-api-spec/test/esql/20_aggs.yml | 26 +++++++ .../test/esql/25_aggs_on_null.yml | 7 ++ .../rest-api-spec/test/esql/30_types.yml | 34 ++++++++- .../rest-api-spec/test/esql/40_tsdb.yml | 8 ++ .../test/esql/40_unsupported_types.yml | 4 + .../test/esql/45_non_tsdb_counter.yml | 3 + .../test/esql/50_index_patterns.yml | 16 ++++ .../rest-api-spec/test/esql/60_enrich.yml | 4 + .../rest-api-spec/test/esql/60_usage.yml | 1 + .../rest-api-spec/test/esql/61_enrich_ip.yml | 4 + .../test/esql/62_extra_enrich.yml | 3 + .../rest-api-spec/test/esql/70_locale.yml | 2 + .../rest-api-spec/test/esql/80_text.yml | 24 ++++++ .../test/esql/81_text_exact_subfields.yml | 23 ++++-- .../test/esql/90_non_indexed.yml | 1 + .../xpack/restart/FullClusterRestartIT.java | 1 + .../test/querying_cluster/80_esql.yml | 3 + 55 files changed, 600 insertions(+), 198 deletions(-) create mode 100644 test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ImpersonateOfficialClientTestClient.java diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 4f43817b7b92c..8c8eb942f891b 100644 --- 
a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -56,12 +56,13 @@ * crash Elasticsearch. */ public class HeapAttackIT extends ESRestTestCase { - @ClassRule public static ElasticsearchCluster cluster = Clusters.buildCluster(); static volatile boolean SUITE_ABORTED = false; + private static String ESQL_VERSION = "2024.04.01"; + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -155,8 +156,8 @@ private Response groupOnManyLongs(int count) throws IOException { } private StringBuilder makeManyLongs(int count) { - StringBuilder query = new StringBuilder(); - query.append("{\"query\":\"FROM manylongs\\n| EVAL i0 = a + b, i1 = b + i0"); + StringBuilder query = startQueryWithVersion(ESQL_VERSION); + query.append("FROM manylongs\\n| EVAL i0 = a + b, i1 = b + i0"); for (int i = 2; i < count; i++) { query.append(", i").append(i).append(" = i").append(i - 2).append(" + ").append(i - 1); } @@ -186,8 +187,8 @@ public void testHugeConcat() throws IOException { } private Response concat(int evals) throws IOException { - StringBuilder query = new StringBuilder(); - query.append("{\"query\":\"FROM single | EVAL str = TO_STRING(a)"); + StringBuilder query = startQueryWithVersion(ESQL_VERSION); + query.append("FROM single | EVAL str = TO_STRING(a)"); for (int e = 0; e < evals; e++) { query.append("\n| EVAL str=CONCAT(") .append(IntStream.range(0, 10).mapToObj(i -> "str").collect(Collectors.joining(", "))) @@ -223,8 +224,8 @@ public void testHugeManyConcat() throws IOException { * Tests that generate many moderately long strings. */ private Response manyConcat(int strings) throws IOException { - StringBuilder query = new StringBuilder(); - query.append("{\"query\":\"FROM manylongs | EVAL str = CONCAT("); + StringBuilder query = startQueryWithVersion(ESQL_VERSION); + query.append("FROM manylongs | EVAL str = CONCAT("); query.append( Arrays.stream(new String[] { "a", "b", "c", "d", "e" }) .map(f -> "TO_STRING(" + f + ")") @@ -274,8 +275,8 @@ public void testTooManyEval() throws IOException { } private Response manyEval(int evalLines) throws IOException { - StringBuilder query = new StringBuilder(); - query.append("{\"query\":\"FROM manylongs"); + StringBuilder query = startQueryWithVersion(ESQL_VERSION); + query.append("FROM manylongs"); for (int e = 0; e < evalLines; e++) { query.append("\n| EVAL "); for (int i = 0; i < 10; i++) { @@ -356,7 +357,9 @@ public void testFetchTooManyBigFields() throws IOException { * Fetches documents containing 1000 fields which are {@code 1kb} each. 
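     * The request body is assembled with {@code startQueryWithVersion(ESQL_VERSION)}, so each query
     * now carries the required version field, e.g.
     * {@code {"version":"2024.04.01","query":"FROM manybigfields | SORT f000 | LIMIT 100"}}.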
     */
    private void fetchManyBigFields(int docs) throws IOException {
-        Response response = query("{\"query\": \"FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}", "columns");
+        StringBuilder query = startQueryWithVersion(ESQL_VERSION);
+        query.append("FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}");
+        Response response = query(query.toString(), "columns");
         Map map = responseAsMap(response);
         ListMatcher columns = matchesList();
         for (int f = 0; f < 1000; f++) {
@@ -383,11 +386,12 @@ public void testAggTooManyMvLongs() throws IOException {
     }

     private Response aggMvLongs(int fields) throws IOException {
-        StringBuilder builder = new StringBuilder("{\"query\": \"FROM mv_longs | STATS MAX(f00) BY f00");
+        StringBuilder query = startQueryWithVersion(ESQL_VERSION);
+        query.append("FROM mv_longs | STATS MAX(f00) BY f00");
         for (int f = 1; f < fields; f++) {
-            builder.append(", f").append(String.format(Locale.ROOT, "%02d", f));
+            query.append(", f").append(String.format(Locale.ROOT, "%02d", f));
         }
-        return query(builder.append("\"}").toString(), "columns");
+        return query(query.append("\"}").toString(), "columns");
     }

     public void testFetchMvLongs() throws IOException {
@@ -408,7 +412,9 @@ public void testFetchTooManyMvLongs() throws IOException {
     }

     private Response fetchMvLongs() throws IOException {
-        return query("{\"query\": \"FROM mv_longs\"}", "columns");
+        StringBuilder query = startQueryWithVersion(ESQL_VERSION);
+        query.append("FROM mv_longs\"}");
+        return query(query.toString(), "columns");
     }

     private void initManyLongs() throws IOException {
@@ -576,4 +582,12 @@ public void assertRequestBreakerEmpty() throws Exception {
             }
         });
     }
+
+    private static StringBuilder startQueryWithVersion(String version) {
+        StringBuilder query = new StringBuilder();
+        query.append("{\"version\":\"" + version + "\",");
+        query.append("\"query\":\"");
+
+        return query;
+    }
 }
diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ImpersonateOfficialClientTestClient.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ImpersonateOfficialClientTestClient.java
new file mode 100644
index 0000000000000..34856c8ca93cc
--- /dev/null
+++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ImpersonateOfficialClientTestClient.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.test.rest.yaml;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
+import org.elasticsearch.client.NodeSelector;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestClientBuilder;
+import org.elasticsearch.common.CheckedSupplier;
+import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
+import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.function.BiPredicate;
+
+/**
+ * Impersonates an official test client by setting the {@code x-elastic-client-meta} header.
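+ * The ESQL mixed-cluster tests use it to pose as an official 8.13 client, letting them omit
+ * the now-required version field when talking to nodes older than 8.13.3.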
+ */ +public class ImpersonateOfficialClientTestClient extends ClientYamlTestClient { + private final String meta; + + public ImpersonateOfficialClientTestClient( + ClientYamlSuiteRestSpec restSpec, + RestClient restClient, + List hosts, + CheckedSupplier clientBuilderWithSniffedNodes, + String meta + ) { + super(restSpec, restClient, hosts, clientBuilderWithSniffedNodes); + this.meta = meta; + } + + @Override + public ClientYamlTestResponse callApi( + String apiName, + Map params, + HttpEntity entity, + Map headers, + NodeSelector nodeSelector, + BiPredicate pathPredicate + ) throws IOException { + headers.put("x-elastic-client-meta", meta); + return super.callApi(apiName, params, entity, headers, nodeSelector, pathPredicate); + } +} diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java index 544eb82fb5ace..c7e9c3994ee4b 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -90,6 +90,7 @@ private Response runAsync(String user, String command) throws IOException { } XContentBuilder json = JsonXContent.contentBuilder(); json.startObject(); + json.field("version", ESQL_VERSION); json.field("query", command); addRandomPragmas(json); json.field("wait_for_completion_timeout", timeValueNanos(randomIntBetween(1, 1000))); diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 7a9b90baa0d35..41df233af6459 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.equalTo; public class EsqlSecurityIT extends ESRestTestCase { + static String ESQL_VERSION = "2024.04.01.🚀"; @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() @@ -354,6 +355,7 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio } XContentBuilder json = JsonXContent.contentBuilder(); json.startObject(); + json.field("version", ESQL_VERSION); json.field("query", command); addRandomPragmas(json); json.endObject(); diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 09397710bb856..c25ef858534e0 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -25,20 +25,27 @@ dependencies { GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") +// ESQL is available in 8.11 or later def supportedVersion = bwcVersion -> { - // ESQL is available in 8.11 or later return bwcVersion.onOrAfter(Version.fromString("8.11.0")); } +// Versions on and after 8.13.3 will get a `version` parameter +def versionUnsupported = bwcVersion -> { + return bwcVersion.before(Version.fromString("8.13.3")); +} + BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> def javaRestTest = tasks.register("v${bwcVersion}#javaRestTest", StandaloneRestIntegTestTask) { 
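    // run against the BWC distribution and tell the tests whether that old version accepts the version parameter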
usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) + systemProperty("tests.version_parameter_unsupported", versionUnsupported(bwcVersion)) } def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) + systemProperty("tests.version_parameter_unsupported", versionUnsupported(bwcVersion)) testClassesDirs = sourceSets.yamlRestTest.output.classesDirs classpath = sourceSets.yamlRestTest.runtimeClasspath } diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java index 2c9833ba0793e..9bb114aaa6f6c 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java @@ -9,14 +9,28 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.http.HttpHost; +import org.elasticsearch.client.RestClient; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.ImpersonateOfficialClientTestClient; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import org.elasticsearch.test.rest.yaml.section.ApiCallSection; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); @@ -32,6 +46,9 @@ public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { + if (EsqlSpecTestCase.availableVersions().isEmpty()) { + return updateEsqlQueryDoSections(createParameters(), EsqlClientYamlIT::stripVersion); + } return createParameters(); } @@ -40,4 +57,63 @@ public static Iterable parameters() throws Exception { public void assertRequestBreakerEmpty() throws Exception { EsqlSpecTestCase.assertRequestBreakerEmpty(); } + + @Override + protected ClientYamlTestClient initClientYamlTestClient( + final ClientYamlSuiteRestSpec restSpec, + final RestClient restClient, + final List hosts + ) { + if (EsqlSpecTestCase.availableVersions().isEmpty()) { + return new ImpersonateOfficialClientTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts, "es=8.13"); + } + return super.initClientYamlTestClient(restSpec, restClient, hosts); + } + + static DoSection stripVersion(DoSection doSection) { + ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi(doSection.getApiCallSection().getApi()); + for (Map body : copy.getBodies()) { + 
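+            // versions before 8.13.3 do not understand the version parameter, so strip it before the request is replayed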
body.remove("version"); + } + doSection.setApiCallSection(copy); + return doSection; + } + + // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt + public static Iterable updateEsqlQueryDoSections(Iterable parameters, Function modify) + throws Exception { + List result = new ArrayList<>(); + for (Object[] orig : parameters) { + assert orig.length == 1; + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; + try { + ClientYamlTestSection modified = new ClientYamlTestSection( + candidate.getTestSection().getLocation(), + candidate.getTestSection().getName(), + candidate.getTestSection().getPrerequisiteSection(), + candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() + ); + result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); + } + } + return result; + } + + // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt + private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { + if (false == (e instanceof DoSection)) { + return e; + } + DoSection doSection = (DoSection) e; + String api = doSection.getApiCallSection().getApi(); + return switch (api) { + case "esql.query" -> modify.apply(doSection); + // case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( + // "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." + // ); + default -> e; + }; + } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 2f681fc23bf31..3a3fbdba74ae8 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; import org.junit.After; import org.junit.Before; @@ -122,7 +123,9 @@ void indexDocs(RestClient client, String index, List docs) throws IOExcepti } private Map run(String query) throws IOException { - Map resp = runEsql(new RestEsqlTestCase.RequestObjectBuilder().query(query).build()); + Map resp = runEsql( + new RestEsqlTestCase.RequestObjectBuilder().query(query).version(EsqlTestUtils.latestEsqlVersionOrSnapshot()).build() + ); logger.info("--> query {} response {}", query, resp); return resp; } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 6743657e86874..4de2a0f565c71 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ 
b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -69,7 +69,7 @@ public void testBasicEsql() throws IOException { Response response = client().performRequest(bulk); Assert.assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); - RequestObjectBuilder builder = new RequestObjectBuilder().query(fromIndex() + " | stats avg(value)"); + RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats avg(value)"); if (Build.current().isSnapshot()) { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } @@ -89,7 +89,7 @@ public void testInvalidPragma() throws IOException { request.setJsonEntity("{\"f\":" + i + "}"); assertOK(client().performRequest(request)); } - RequestObjectBuilder builder = new RequestObjectBuilder().query("from test-index | limit 1 | keep f"); + RequestObjectBuilder builder = requestObjectBuilder().query("from test-index | limit 1 | keep f"); builder.pragmas(Settings.builder().put("data_partitioning", "invalid-option").build()); ResponseException re = expectThrows(ResponseException.class, () -> runEsqlSync(builder)); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("No enum constant")); @@ -99,7 +99,7 @@ public void testInvalidPragma() throws IOException { public void testPragmaNotAllowed() throws IOException { assumeFalse("pragma only disabled on release builds", Build.current().isSnapshot()); - RequestObjectBuilder builder = new RequestObjectBuilder().query("row a = 1, b = 2"); + RequestObjectBuilder builder = requestObjectBuilder().query("row a = 1, b = 2"); builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); ResponseException re = expectThrows(ResponseException.class, () -> runEsqlSync(builder)); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("[pragma] only allowed in snapshot builds")); @@ -197,7 +197,7 @@ public void testIncompatibleMappingsErrors() throws IOException { } private void assertException(String query, String... 
errorMessages) throws IOException { - ResponseException re = expectThrows(ResponseException.class, () -> runEsqlSync(new RequestObjectBuilder().query(query))); + ResponseException re = expectThrows(ResponseException.class, () -> runEsqlSync(requestObjectBuilder().query(query))); assertThat(re.getResponse().getStatusLine().getStatusCode(), equalTo(400)); for (var error : errorMessages) { assertThat(re.getMessage(), containsString(error)); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/TSDBRestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/TSDBRestEsqlIT.java index b7ab7b623d460..057119103f0e9 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/TSDBRestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/TSDBRestEsqlIT.java @@ -61,9 +61,8 @@ public void testTimeSeriesQuerying() throws IOException { Response response = client().performRequest(bulk); assertEquals("{\"errors\":false}", EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); - RestEsqlTestCase.RequestObjectBuilder builder = new RestEsqlTestCase.RequestObjectBuilder().query( - "FROM k8s | KEEP k8s.pod.name, @timestamp" - ); + RestEsqlTestCase.RequestObjectBuilder builder = RestEsqlTestCase.requestObjectBuilder() + .query("FROM k8s | KEEP k8s.pod.name, @timestamp"); builder.pragmas(Settings.builder().put("time_series", true).build()); Map result = runEsqlSync(builder); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java index 70afdf32d3808..b2a3b12c2a027 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/AbstractEsqlClientYamlIT.java @@ -11,13 +11,19 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; +import org.elasticsearch.test.rest.yaml.section.DoSection; +import org.elasticsearch.test.rest.yaml.section.ExecutableSection; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; -abstract class AbstractEsqlClientYamlIT extends ESClientYamlSuiteTestCase { +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +abstract class AbstractEsqlClientYamlIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) @@ -44,4 +50,40 @@ private void assertRequestBreakerEmpty() throws Exception { */ EsqlSpecTestCase.assertRequestBreakerEmpty(); } + + public static Iterable updateEsqlQueryDoSections(Iterable parameters, Function modify) + throws Exception { + List result = new ArrayList<>(); + for (Object[] orig : parameters) { + 
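+                // createParameters() wraps each yaml test candidate in a single-element array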
assert orig.length == 1; + ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; + try { + ClientYamlTestSection modified = new ClientYamlTestSection( + candidate.getTestSection().getLocation(), + candidate.getTestSection().getName(), + candidate.getTestSection().getPrerequisiteSection(), + candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() + ); + result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); + } + } + return result; + } + + private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { + if (false == (e instanceof DoSection)) { + return e; + } + DoSection doSection = (DoSection) e; + String api = doSection.getApiCallSection().getApi(); + return switch (api) { + case "esql.query" -> modify.apply(doSection); + case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( + "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." + ); + default -> e; + }; + } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java index 657f396b2857f..f5bd1efb106a3 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncIT.java @@ -10,16 +10,9 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.section.ApiCallSection; -import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; -import org.elasticsearch.test.rest.yaml.section.DoSection; -import org.elasticsearch.test.rest.yaml.section.ExecutableSection; -import java.util.ArrayList; -import java.util.List; import java.util.Map; -import java.util.function.Function; /** * Run the ESQL yaml tests against the async esql endpoint with a 30 minute {@code wait_until_completion_timeout}. 
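 * Each {@code esql.query} do-section is rewritten on the fly into an {@code esql.async_query} call with that timeout.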
@@ -33,7 +26,7 @@ public EsqlClientYamlAsyncIT(final ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { - return parameters(doSection -> { + return updateEsqlQueryDoSections(createParameters(), doSection -> { ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi("esql.async_query"); for (Map body : copy.getBodies()) { body.put("wait_for_completion_timeout", "30m"); @@ -42,39 +35,4 @@ public static Iterable parameters() throws Exception { return doSection; }); } - - public static Iterable parameters(Function modify) throws Exception { - List result = new ArrayList<>(); - for (Object[] orig : ESClientYamlSuiteTestCase.createParameters()) { - assert orig.length == 1; - ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; - try { - ClientYamlTestSection modified = new ClientYamlTestSection( - candidate.getTestSection().getLocation(), - candidate.getTestSection().getName(), - candidate.getTestSection().getPrerequisiteSection(), - candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() - ); - result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); - } - } - return result; - } - - private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { - if (false == (e instanceof DoSection)) { - return e; - } - DoSection doSection = (DoSection) e; - String api = doSection.getApiCallSection().getApi(); - return switch (api) { - case "esql.query" -> modify.apply(doSection); - case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( - "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." 
- ); - default -> e; - }; - } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java index b32a7385d12c5..38051007568e9 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlClientYamlAsyncSubmitAndFetchIT.java @@ -32,7 +32,7 @@ public EsqlClientYamlAsyncSubmitAndFetchIT(final ClientYamlTestCandidate testCan @ParametersFactory public static Iterable parameters() throws Exception { - return EsqlClientYamlAsyncIT.parameters(DoEsqlAsync::new); + return updateEsqlQueryDoSections(createParameters(), DoEsqlAsync::new); } private static class DoEsqlAsync implements ExecutableSection { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 75723ed1d5313..14579dfb537da 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -68,7 +68,14 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { private final Integer lineNumber; protected final CsvTestCase testCase; protected final Mode mode; - protected final Set versions; + + public static Set availableVersions() { + if ("true".equals(System.getProperty("tests.version_parameter_unsupported"))) { + // TODO: skip tests with explicitly set version and/or strip the version if it's 2024.04.01. + return Set.of(); + } + return Build.current().isSnapshot() ? Set.of(EsqlVersion.values()) : Set.of(EsqlVersion.releasedAscending()); + } public enum Mode { SYNC, @@ -101,8 +108,6 @@ protected EsqlSpecTestCase(String fileName, String groupName, String testName, I this.lineNumber = lineNumber; this.testCase = testCase; this.mode = mode; - // TODO: Read applicable versions from csv-spec files/make it part of testCase. - this.versions = Build.current().isSnapshot() ? Set.of(EsqlVersion.values()) : Set.of(EsqlVersion.releasedAscending()); } @Before @@ -150,8 +155,13 @@ protected void shouldSkipTest(String testName) throws IOException { protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - EsqlVersion version = randomFrom(versions); - String versionString = randomBoolean() ? version.toString() : version.versionStringWithoutEmoji(); + + String versionString = null; + if (availableVersions().isEmpty() == false) { + EsqlVersion version = randomFrom(availableVersions()); + versionString = randomBoolean() ? 
version.toString() : version.versionStringWithoutEmoji(); + } + Map answer = runEsql( builder.query(testCase.query).version(versionString), testCase.expectedWarnings(false), diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index d107f8a147fd6..d5daab2d46a80 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.hamcrest.Matcher; import org.junit.Before; @@ -296,7 +297,7 @@ public void testFlattenedUnsupported() throws IOException { new Test("flattened").createIndex("test", "flattened"); index("test", """ {"flattened": {"a": "foo"}}"""); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); + Map result = runEsql("FROM test* | LIMIT 2"); assertMap( result, @@ -310,10 +311,7 @@ public void testEmptyMapping() throws IOException { index("test", """ {}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT missing | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT missing | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat(err, containsString("Unknown column [missing]")); @@ -674,16 +672,13 @@ public void testIncompatibleTypes() throws IOException { index("test2", """ {"f": 1}"""); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test*")); + Map result = runEsql("FROM test*"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("f", "unsupported"))) .entry("values", List.of(matchesList().item(null), matchesList().item(null))) ); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT f | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT f | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat( deyaml(err), @@ -715,7 +710,7 @@ public void testDistinctInEachIndex() throws IOException { index("test2", """ {"other": "o2"}"""); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT file, other")); + Map result = runEsql("FROM test* | SORT file, other"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("file", "keyword"), columnInfo("other", "keyword"))) @@ -769,10 +764,7 @@ public void testMergeKeywordAndObject() throws IOException { index("test2", """ {"file": {"raw": "o2"}}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT file, file.raw | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT file, file.raw | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat( 
deyaml(err), @@ -782,7 +774,7 @@ public void testMergeKeywordAndObject() throws IOException { ) ); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT file.raw | LIMIT 2")); + Map result = runEsql("FROM test* | SORT file.raw | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("file", "unsupported"), columnInfo("file.raw", "keyword"))) @@ -822,15 +814,12 @@ public void testPropagateUnsupportedToSubFields() throws IOException { index("test", """ {"f": "192.168.0.1/24"}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT f, f.raw | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT f, f.raw | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat(err, containsString("Cannot use field [f] with unsupported type [ip_range]")); assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]")); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); + Map result = runEsql("FROM test* | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))) @@ -888,15 +877,12 @@ public void testMergeUnsupportedAndObject() throws IOException { index("test2", """ {"f": {"raw": "o2"}}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT f, f.raw | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT f, f.raw | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat(err, containsString("Cannot use field [f] with unsupported type [ip_range]")); assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]")); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); + Map result = runEsql("FROM test* | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))) @@ -931,7 +917,7 @@ public void testIntegerDocValuesConflict() throws IOException { index("test2", """ {"emp_no": 2}"""); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT emp_no | LIMIT 2")); + Map result = runEsql("FROM test* | SORT emp_no | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("emp_no", "integer"))) @@ -967,10 +953,7 @@ public void testLongIntegerConflict() throws IOException { index("test2", """ {"emp_no": 2}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT emp_no | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT emp_no | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat( deyaml(err), @@ -980,7 +963,7 @@ public void testLongIntegerConflict() throws IOException { ) ); - Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); + Map result = runEsql("FROM test* | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("emp_no", "unsupported"))) 
@@ -1016,10 +999,7 @@ public void testIntegerShortConflict() throws IOException { index("test2", """ {"emp_no": 2}"""); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT emp_no | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT emp_no | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat( deyaml(err),
@@ -1029,7 +1009,7 @@ ) ); - Map<String, Object> result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); + Map<String, Object> result = runEsql("FROM test* | LIMIT 2"); assertMap( result, matchesMap().entry("columns", List.of(columnInfo("emp_no", "unsupported")))
@@ -1071,13 +1051,10 @@ public void testTypeConflictInObject() throws IOException { index("test2", """ {"foo": {"emp_no": "cat"}}"""); - Map<String, Object> result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 3")); + Map<String, Object> result = runEsql("FROM test* | LIMIT 3"); assertMap(result, matchesMap().entry("columns", List.of(columnInfo("foo.emp_no", "unsupported"))).extraOk()); - ResponseException e = expectThrows( - ResponseException.class, - () -> runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | SORT foo.emp_no | LIMIT 3")) - ); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT foo.emp_no | LIMIT 3")); String err = EntityUtils.toString(e.getResponse().getEntity()); assertThat( deyaml(err),
@@ -1413,7 +1390,7 @@ private void fieldMapping(XContentBuilder builder) throws IOException { } private Map<String, Object> fetchAll() throws IOException { - return runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 10")); + return runEsql("FROM test* | LIMIT 10"); } }
@@ -1456,4 +1433,9 @@ private static void createIndex(String name, CheckedConsumer<XContentBuilder, IOException> mapping) + + private Map<String, Object> runEsql(String query) throws IOException { + return runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query(query).version(EsqlTestUtils.latestEsqlVersionOrSnapshot())); + } + }
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
index a670b11c61780..07abc26e8c789 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
@@ -13,6 +13,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.junit.After; import org.junit.Before;
@@ -142,10 +143,7 @@ public void wipeTestData() throws IOException { } public void testNonExistentEnrichPolicy() throws IOException { - ResponseException re = expectThrows( - ResponseException.class, - () -> RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris")) - ); + ResponseException re = expectThrows(ResponseException.class, () -> runEsql("from test | enrich countris", Mode.SYNC)); assertThat( EntityUtils.toString(re.getResponse().getEntity()), containsString("cannot find enrich policy [countris], did you mean [countries]?")
@@ -153,12 +151,7 @@ public void testNonExistentEnrichPolicy() throws IOException { } public void testNonExistentEnrichPolicy_KeepField() throws IOException { - ResponseException re = expectThrows( - ResponseException.class, - () -> RestEsqlTestCase.runEsqlSync( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris | keep number") - ) - ); + ResponseException re = expectThrows(ResponseException.class, () -> runEsql("from test | enrich countris | keep number", Mode.SYNC)); assertThat( EntityUtils.toString(re.getResponse().getEntity()), containsString("cannot find enrich policy [countris], did you mean [countries]?")
@@ -166,9 +159,7 @@ public void testNonExistentEnrichPolicy_KeepField() throws IOException { } public void testMatchField_ImplicitFieldsList() throws IOException { - Map<String, Object> result = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number | sort number") - ); + Map<String, Object> result = runEsql("from test | enrich countries | keep number | sort number"); var columns = List.of(Map.of("name", "number", "type", "long")); var values = List.of(List.of(1000), List.of(1000), List.of(5000));
@@ -176,17 +167,19 @@ public void testMatchField_ImplicitFieldsList() throws IOException { } public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { - Map<String, Object> result = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | stats s = sum(number) by country_name") - - ); + Map<String, Object> result = runEsql("from test | enrich countries | stats s = sum(number) by country_name"); var columns = List.of(Map.of("name", "s", "type", "long"), Map.of("name", "country_name", "type", "keyword")); var values = List.of(List.of(2000, "United States of America"), List.of(5000, "China")); assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); } - private Map<String, Object> runEsql(RestEsqlTestCase.RequestObjectBuilder requestObject) throws IOException { + private Map<String, Object> runEsql(String query) throws IOException { + return runEsql(query, mode); + } + + private Map<String, Object> runEsql(String query, Mode mode) throws IOException { + var requestObject = new RestEsqlTestCase.RequestObjectBuilder().query(query).version(EsqlTestUtils.latestEsqlVersionOrSnapshot()); if (mode == Mode.ASYNC) { return RestEsqlTestCase.runEsqlAsync(requestObject); } else {
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
index 86d48aca3baed..76fbbcfb71d79 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
@@ -113,6 +113,7 @@ protected RestEsqlTestCase(Mode mode) { public static class RequestObjectBuilder { private final XContentBuilder builder; private boolean isBuilt = false; + private String version; private Boolean keepOnCompletion = null;
@@ -131,7 +132,7 @@ public RequestObjectBuilder query(String query) throws IOException { } public RequestObjectBuilder version(String version) throws IOException { - builder.field("version", version); + this.version = version; return this; }
@@ -179,6 +180,9 @@ public RequestObjectBuilder pragmas(Settings pragmas) throws IOException { public RequestObjectBuilder build() throws IOException { if (isBuilt == false) { + if (version != null) { + builder.field("version", version); + } builder.endObject(); isBuilt = true; }
@@ -203,7 +207,7 @@ public static RequestObjectBuilder jsonBuilder() throws IOException { } public void testGetAnswer() throws IOException { - Map<String, Object> answer = runEsql(builder().query("row a = 1, b = 2")); + Map<String, Object> answer = runEsql(requestObjectBuilder().query("row a = 1, b = 2")); assertEquals(2, answer.size()); Map<String, String> colA = Map.of("name", "a", "type", "integer"); Map<String, String> colB = Map.of("name", "b", "type", "integer");
@@ -212,7 +216,7 @@ public void testGetAnswer() throws IOException { } public void testUseUnknownIndex() throws IOException { - ResponseException e = expectThrows(ResponseException.class, () -> runEsql(builder().query("from doesNotExist"))); + ResponseException e = expectThrows(ResponseException.class, () -> runEsql(requestObjectBuilder().query("from doesNotExist"))); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); assertThat(e.getMessage(), containsString("verification_exception")); assertThat(e.getMessage(), containsString("Unknown index [doesNotExist]"));
@@ -236,7 +240,7 @@ private void useKnownIndexWithOther(String other, String option) throws IOExcept String q = fromIndex() + ',' + other; q += " OPTIONS \"" + option + "\"=\"" + o + "\""; q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; - return builder().query(q); + return requestObjectBuilder().query(q); }; // test failure
@@ -258,7 +262,7 @@ private void useUnknownIndex(String option) { CheckedFunction<Object, RequestObjectBuilder, IOException> builder = o -> { String q = "FROM doesnotexist OPTIONS \"" + option + "\"=\"" + o + "\""; q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; - return builder().query(q); + return requestObjectBuilder().query(q); }; // test failure 404 from resolver
@@ -285,7 +289,7 @@ public void testSearchPreference() throws IOException { q += " OPTIONS " + o; } q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; - return builder().query(q); + return requestObjectBuilder().query(q); }; // verify that it returns as expected
@@ -319,14 +323,14 @@ public void testNullInAggs() throws IOException { Response response = client().performRequest(bulk); assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo("{\"errors\":false}")); - RequestObjectBuilder builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value)"); + RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats min(value)"); Map<String, Object> result = runEsql(builder); assertMap( result, matchesMap().entry("values", List.of(List.of(1))).entry("columns", List.of(Map.of("name", "min(value)", "type", "long"))) ); - builder = new RequestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`"); + builder = requestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`"); result = runEsql(builder); assertMap( result,
@@ -340,7 +344,7 @@ public void testColumnarMode() throws IOException { bulkLoadTestData(docCount); boolean columnar = randomBoolean(); - var query = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc"); + var query = requestObjectBuilder().query(fromIndex() + " | keep keyword, integer | sort integer asc"); if (columnar || randomBoolean()) { query.columnar(columnar); }
@@ -370,27 +374,27 @@ public void testTextMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); + var builder = requestObjectBuilder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("txt", count, null), runEsqlAsTextWithFormat(builder, "txt", null)); } public void testCSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); + var builder = requestObjectBuilder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("csv", count, '|'), runEsqlAsTextWithFormat(builder, "csv", '|')); } public void testTSVMode() throws IOException { int count = randomIntBetween(0, 100); bulkLoadTestData(count); - var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); + var builder = requestObjectBuilder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); assertEquals(expectedTextBody("tsv", count, null), runEsqlAsTextWithFormat(builder, "tsv", null)); } public void testCSVNoHeaderMode() throws IOException { bulkLoadTestData(1); - var builder = builder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); + var builder = requestObjectBuilder().query(fromIndex() + " | keep keyword, integer | sort integer asc | limit 100"); Request request = prepareRequest(SYNC); String mediaType = attachBody(builder.build(), request); RequestOptions.Builder options = request.getOptions().toBuilder();
@@ -448,7 +452,7 @@ public void testOutOfRangeComparisons() throws IOException { for (String fieldWithType : dataTypes) { for (String truePredicate : trueForSingleValuesPredicates) { String comparison = fieldWithType + truePredicate; - var query = builder().query(format(null, "from {} | where {}", testIndexName(), comparison)); + var query = requestObjectBuilder().query(format(null, "from {} | where {}", testIndexName(), comparison)); List<String> expectedWarnings = List.of( "Line 1:29: evaluation of [" + comparison + "] failed, treating result as null. Only first 20 failures recorded.", "Line 1:29: java.lang.IllegalArgumentException: single-value function encountered multi-value" );
@@ -465,7 +469,7 @@ for (String falsePredicate : alwaysFalsePredicates) { String comparison = fieldWithType + falsePredicate; - var query = builder().query(format(null, "from {} | where {}", testIndexName(), comparison)); + var query = requestObjectBuilder().query(format(null, "from {} | where {}", testIndexName(), comparison)); var result = runEsql(query); var values = as(result.get("values"), ArrayList.class);
@@ -481,7 +485,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { Request request = prepareRequest(SYNC); var query = fromIndex() + " | sort integer asc | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; - var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); + var mediaType = attachBody(requestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE);
@@ -521,7 +525,7 @@ public void testMetadataFieldsOnMultipleIndices() throws IOException { assertEquals(201, client().performRequest(request).getStatusLine().getStatusCode()); var query = fromIndex() + "* metadata _index, _version, _id | sort _version"; - Map<String, Object> result = runEsql(new RequestObjectBuilder().query(query)); + Map<String, Object> result = runEsql(requestObjectBuilder().query(query)); var columns = List.of( Map.of("name", "a", "type", "long"), Map.of("name", "_index", "type", "keyword"),
@@ -539,7 +543,7 @@ public void testMetadataFieldsOnMultipleIndices() throws IOException { public void testErrorMessageForEmptyParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql(new RequestObjectBuilder().query("row a = 1 | eval x = ?").params("[]")) + () -> runEsql(requestObjectBuilder().query("row a = 1 | eval x = ?").params("[]")) ); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("Not enough actual parameters 0")); }
@@ -547,7 +551,7 @@ public void testErrorMessageForEmptyParams() throws IOException { public void testErrorMessageForInvalidParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql(new RequestObjectBuilder().query("row a = 1").params("[{\"x\":\"y\"}]")) + () -> runEsql(requestObjectBuilder().query("row a = 1").params("[{\"x\":\"y\"}]")) ); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("Required [value, type]")); }
@@ -555,7 +559,7 @@ public void testErrorMessageForInvalidParams() throws IOException { public void testErrorMessageForMissingTypeInParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql(new RequestObjectBuilder().query("row a = 1").params("[\"x\", 123, true, {\"value\": \"y\"}]")) + () -> runEsql(requestObjectBuilder().query("row a = 1").params("[\"x\", 123, true, {\"value\": \"y\"}]")) ); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("Required [type]")); }
@@ -563,7 +567,7 @@ public void testErrorMessageForMissingTypeInParams() throws IOException { public void testErrorMessageForMissingValueInParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql(new RequestObjectBuilder().query("row a = 1").params("[\"x\", 123, true, {\"type\": \"y\"}]")) + () -> runEsql(requestObjectBuilder().query("row a = 1").params("[\"x\", 123, true, {\"type\": \"y\"}]")) ); assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("Required [value]")); }
@@ -571,7 +575,7 @@ public void testErrorMessageForMissingValueInParams() throws IOException { public void testErrorMessageForInvalidTypeInParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsqlSync(new RequestObjectBuilder().query("row a = 1 | eval x = ?").params("[{\"type\": \"byte\", \"value\": 5}]")) + () -> runEsqlSync(requestObjectBuilder().query("row a = 1 | eval x = ?").params("[{\"type\": \"byte\", \"value\": 5}]")) ); assertThat( EntityUtils.toString(re.getResponse().getEntity()),
@@ -608,7 +612,7 @@ public void testErrorMessageForLiteralDateMathOverflowOnNegation() throws IOExce private void assertExceptionForDateMath(String dateMathString, String errorSubstring) throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql(new RequestObjectBuilder().query("row a = 1 | eval x = now() + (" + dateMathString + ")")) + () -> runEsql(requestObjectBuilder().query("row a = 1 | eval x = now() + (" + dateMathString + ")")) ); String responseMessage = EntityUtils.toString(re.getResponse().getEntity());
@@ -621,9 +625,7 @@ private void assertExceptionForDateMath(String dateMathString, String errorSubst public void testErrorMessageForArrayValuesInParams() throws IOException { ResponseException re = expectThrows( ResponseException.class, - () -> runEsql( - new RequestObjectBuilder().query("row a = 1 | eval x = ?").params("[{\"type\": \"integer\", \"value\": [5, 6, 7]}]") - ) + () -> runEsql(requestObjectBuilder().query("row a = 1 | eval x = ?").params("[{\"type\": \"integer\", \"value\": [5, 6, 7]}]")) ); assertThat( EntityUtils.toString(re.getResponse().getEntity()),
@@ -698,6 +700,10 @@ public static Map<String, Object> runEsqlSync( RequestOptions.Builder options = request.getOptions().toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE); // We assert the warnings ourselves options.addHeader("Content-Type", mediaType); + if ("true".equals(System.getProperty("tests.version_parameter_unsupported"))) { + // Masquerade as an old version of the official client, so we get the oldest version by default + options.addHeader("x-elastic-client-meta", "es=8.13"); + } if (randomBoolean()) { options.addHeader("Accept", mediaType);
@@ -723,6 +729,10 @@ public static Map<String, Object> runEsqlAsync( RequestOptions.Builder options = request.getOptions().toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE); // We assert the warnings ourselves options.addHeader("Content-Type", mediaType); + if ("true".equals(System.getProperty("tests.version_parameter_unsupported"))) { + // Masquerade as an old version of the official client, so we get the oldest version by default + options.addHeader("x-elastic-client-meta", "es=8.13"); + } if (randomBoolean()) { options.addHeader("Accept", mediaType);
@@ -1005,8 +1015,12 @@ private static String repeatValueAsMV(Object value) { return "[" + value + ", " + value + "]"; } - private static RequestObjectBuilder builder() throws IOException { - return new RequestObjectBuilder(); + public static RequestObjectBuilder requestObjectBuilder(String version) throws IOException { + return new RequestObjectBuilder().version(version); + } + + public static RequestObjectBuilder requestObjectBuilder() throws IOException { + return requestObjectBuilder(EsqlTestUtils.latestEsqlVersionOrSnapshot()); } @After
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java
index cdec7752aef59..e562c1cfa72e6 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java
@@ -12,6 +12,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.esql.CsvTestsDataLoader; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; +import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.junit.AfterClass; import org.junit.Before;
@@ -96,7 +97,9 @@ private void checkException(EsqlQueryGenerator.QueryExecuted query) { private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { try { - Map<String, Object> a = RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query(command).build()); + Map<String, Object> a = RestEsqlTestCase.runEsqlSync( + new RestEsqlTestCase.RequestObjectBuilder().query(command).version(EsqlVersion.ROCKET.toString()).build() + ); List<EsqlQueryGenerator.Column> outputSchema = outputSchema(a); return new EsqlQueryGenerator.QueryExecuted(command, depth, outputSchema, null); } catch (Exception e) {
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
index e6470e0eb2d05..5113346baf0ac 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils;
@@ -23,6 +24,7 @@ import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; +import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan;
@@ -54,9 +56,12 @@ import static org.junit.Assert.assertTrue; public final class EsqlTestUtils { + public static String latestEsqlVersionOrSnapshot() { + EsqlVersion version = Build.current().isSnapshot() ?
EsqlVersion.SNAPSHOT : EsqlVersion.latestReleased(); + return version.toString(); + } public static class TestSearchStats extends SearchStats { - public TestSearchStats() { super(emptyList()); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index a9238d202e5b5..04a752e79b2f4 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; @@ -39,6 +40,21 @@ @TestLogging(value = "org.elasticsearch.xpack.esql.session:DEBUG", reason = "to better understand planning") public abstract class AbstractEsqlIntegTestCase extends ESIntegTestCase { + public static EsqlQueryRequest asyncSyncRequestOnLatestVersion() { + EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest(); + applyLatestVersion(request); + return request; + } + + public static EsqlQueryRequest syncRequestOnLatestVersion() { + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + applyLatestVersion(request); + return request; + } + + private static void applyLatestVersion(EsqlQueryRequest request) { + request.esqlVersion(EsqlTestUtils.latestEsqlVersionOrSnapshot()); + } @After public void ensureExchangesAreReleased() throws Exception { @@ -138,9 +154,18 @@ protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas) { } protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) { - EsqlQueryRequest request = new EsqlQueryRequest(); + return run(esqlCommands, pragmas, filter, null); + } + + protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter, String version) { + EsqlQueryRequest request = syncRequestOnLatestVersion(); + if (version != null) { + request.esqlVersion(version); + } request.query(esqlCommands); - request.pragmas(pragmas); + if (pragmas != null) { + request.pragmas(pragmas); + } if (filter != null) { request.filter(filter); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index bc4708cc19c1f..736a20b367b71 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -159,7 +159,7 @@ private void createRemoteIndex(int numDocs) throws Exception { public void testCancel() throws Exception { createRemoteIndex(between(10, 100)); - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM *:test | STATS total=sum(const) | LIMIT 1"); 
request.pragmas(randomPragmas()); PlainActionFuture<EsqlQueryResponse> requestFuture = new PlainActionFuture<>();
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java
index 2b59e6dd1957d..77fc6987e07c3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java
@@ -457,7 +457,7 @@ public void testEnrichCoordinatorThenEnrichRemote() { } protected EsqlQueryResponse runQuery(String query) { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); if (randomBoolean()) {
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java
index ac2abf21a8f8c..9021a10562124 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java
@@ -156,7 +156,7 @@ public void testProfile() { waitForNoInitializingShards(client(REMOTE_CLUSTER), TimeValue.timeValueSeconds(30), "logs-2"); final int localOnlyProfiles; { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true);
@@ -171,7 +171,7 @@ public void testProfile() { } final int remoteOnlyProfiles; { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM *:logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true);
@@ -186,7 +186,7 @@ public void testProfile() { } final int allProfiles; { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM logs*,*:logs* | stats total = sum(v)"); request.pragmas(pragmas); request.profile(true);
@@ -203,7 +203,7 @@ } public void testWarnings() throws Exception { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); PlainActionFuture<EsqlQueryResponse> future = new PlainActionFuture<>(); InternalTestCluster cluster = cluster(LOCAL_CLUSTER);
@@ -229,7 +229,7 @@ } protected EsqlQueryResponse runQuery(String query) { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); return runQuery(request);
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
index 9b3f61175c3f7..43c282e9361f9 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
@@ -328,7 +328,7 @@ public void testTopN() { } public void testProfile() { - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.pragmas(randomPragmas()); request.query("from listens* | sort timestamp DESC | limit 1 | " + enrichSongCommand() + " | KEEP timestamp, artist"); request.profile(true);
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java
index 85eb0c02625ad..f16f5808da89f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java
@@ -130,7 +130,7 @@ public void testBreaker() { setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 512))); try { final ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> { - var request = new EsqlQueryRequest(); + var request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("from test_breaker | stats count_distinct(foo) by bar"); request.pragmas(randomPragmas()); try (var ignored = client().execute(EsqlQueryAction.INSTANCE, request).actionGet(2, TimeUnit.MINUTES)) {
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index 4524c4c889fac..82ab52ca5a1b0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -367,7 +367,7 @@ protected void doRun() throws Exception { try { scriptPermits.release(numberOfDocs()); // do not block Lucene operators Client client = client(coordinator); - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); client().admin() .indices() .prepareUpdateSettings("test")
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java
index e2e635917ed1c..27edadb25ab26 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java
@@ -52,8 +52,11 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { } @Override - protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) { - EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest(); + protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter, String version) { + EsqlQueryRequest request = AbstractEsqlIntegTestCase.asyncSyncRequestOnLatestVersion(); + if (version != null) { + request.esqlVersion(version); + } request.query(esqlCommands); request.pragmas(pragmas); // deliberately small timeout, to frequently trigger incomplete response
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java
index a1fbee17ef8ec..150d617bb4e29 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java
@@ -37,19 +37,17 @@ public void testFilter() { } bulk.get(); { - EsqlQueryRequest request = new EsqlQueryRequest(); - request.query("FROM test | limit 1000"); - request.filter(new RangeQueryBuilder("@timestamp").from(epoch - TimeValue.timeValueHours(3).millis()).to("now")); - try (var resp = run(request)) { + String query = "FROM test | limit 1000"; + var filter = new RangeQueryBuilder("@timestamp").from(epoch - TimeValue.timeValueHours(3).millis()).to("now"); + try (var resp = run(query, null, filter)) { List<List<Object>> values = getValuesList(resp); assertThat(values, hasSize(oldDocs)); } } { - EsqlQueryRequest request = new EsqlQueryRequest(); - request.query("FROM test | limit 1000"); - request.filter(new RangeQueryBuilder("@timestamp").from("now").to(epoch + TimeValue.timeValueHours(3).millis())); - try (var resp = run(request)) { + String query = "FROM test | limit 1000"; + var filter = new RangeQueryBuilder("@timestamp").from("now").to(epoch + TimeValue.timeValueHours(3).millis()); + try (var resp = run(query, null, filter)) { List<List<Object>> values = getValuesList(resp); assertThat(values, hasSize(newDocs)); }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java
index 0f05add15da53..445ca0414ed88 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java
@@ -68,7 +68,7 @@ public void testCollectWarnings() throws Exception { DiscoveryNode coordinator = randomFrom(clusterService().state().nodes().stream().toList()); client().admin().indices().prepareRefresh("index-1", "index-2").get(); - EsqlQueryRequest request = new EsqlQueryRequest(); + EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); request.query("FROM index-* | EVAL ip = to_ip(host) | STATS s = COUNT(*) by ip | KEEP ip | LIMIT 100"); request.pragmas(randomPragmas()); CountDownLatch latch = new CountDownLatch(1);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java
index 54ae2f4c90fc1..32ff0cf7bc6aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java
@@ -69,9 +69,7 @@ public EsqlQueryRequest(StreamInput in) throws IOException { public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.hasText(esqlVersion) == false) { - // TODO: make this required - // "https://github.com/elastic/elasticsearch/issues/104890" - // validationException = addValidationError(invalidVersion("is required"), validationException); + validationException = addValidationError(invalidVersion("is required"), validationException); } else { EsqlVersion version = EsqlVersion.parse(esqlVersion); if (version == null) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java
index 511fbd9f1c275..9eeffbb35c10e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java
@@ -7,11 +7,13 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.Build; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.core.esql.action.internal.SharedSecrets; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.version.EsqlVersion; public class EsqlQueryRequestBuilder extends org.elasticsearch.xpack.core.esql.action.EsqlQueryRequestBuilder< EsqlQueryRequest, EsqlQueryResponse> {
@@ -27,6 +29,8 @@ public static EsqlQueryRequestBuilder newSyncEsqlQueryRequestBuilder(Elasticsear private EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryRequest request) { super(client, EsqlQueryAction.INSTANCE, request); + EsqlVersion version = Build.current().isSnapshot() ? EsqlVersion.SNAPSHOT : EsqlVersion.latestReleased(); + esqlVersion(version.versionStringWithoutEmoji()); } @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java
index 6ec1af033f86c..0c9bfa2054b11 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java
@@ -238,7 +238,6 @@ public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104890") public void testMissingVersionIsNotValid() throws IOException { String missingVersion = randomBoolean() ? "" : ", \"version\": \"\""; String json = String.format(Locale.ROOT, """
diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java
index 2c393ea7ed1df..6522196eb76ac 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java
@@ -690,6 +690,9 @@ protected Request esqlRequest(String command) throws IOException { body.endObject(); } } + // TODO: we should use the latest or a random version, even when new versions are released. + String version = Build.current().isSnapshot() ?
"snapshot" : "2024.04.01"; + body.field("version", version); body.endObject(); Request request = new Request("POST", "_query"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 412e23a768535..bc1bf5987a6d8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -22,6 +22,7 @@ esql.query: body: query: 'FROM test | sort emp_no | eval ip = to_ip(coalesce(ip1.keyword, "255.255.255.255")) | keep emp_no, ip' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -41,6 +42,7 @@ esql.query: body: query: 'FROM test | sort emp_no | eval x1 = concat(ip1, ip2), x2 = coalesce(x1, "255.255.255.255"), x3 = to_ip(x2) | keep emp_no, x*' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -109,6 +111,7 @@ esql.query: body: query: 'from index* metadata _index | limit 5 | sort _index desc' + version: 2024.04.01 - match: { columns.0.name: http.headers } - match: { columns.0.type: unsupported } - match: { columns.1.name: http.headers.location } @@ -171,6 +174,7 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field1) | limit 10' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -180,6 +184,7 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field2) | limit 10' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -189,6 +194,7 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field3) | limit 10' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -232,6 +238,7 @@ esql.query: body: query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: keyword } - match: { columns.1.name: y } @@ -245,6 +252,7 @@ esql.query: body: query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' + version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: long } - match: { columns.1.name: x } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml index 52d390e7b288b..da87251c35966 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml @@ -118,6 +118,7 @@ setup: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.0.type: "keyword"} @@ -139,6 +140,7 @@ setup: esql.query: body: query: 'from test | sort count | limit 1' + version: 2024.04.01 - match: {columns.1.name: "count"} - match: {columns.1.type: "long"} @@ -151,6 +153,7 @@ setup: body: query: 'from test | keep data | sort data | limit 2' columnar: true + version: 2024.04.01 - match: {columns.0.name: "data"} - match: {columns.0.type: "long"} @@ -162,6 +165,7 @@ 
setup: esql.query: body: query: 'from test | eval x = count + 7 | sort x | limit 1' + version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.1.name: "count"} @@ -179,6 +183,7 @@ setup: esql.query: body: query: 'from test | sort time | eval x = data + 1, y = data_d + count, z = x + y | keep data, x, y, z, time | limit 2' + version: 2024.04.01 - match: {columns.0.name: "data"} - match: {columns.0.type: "long"} @@ -209,6 +214,7 @@ setup: body: query: 'from test | sort time | limit 2 | keep count' columnar: true + version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "count"} @@ -222,6 +228,7 @@ setup: body: query: 'from test | sort time desc | limit 2 | keep count' columnar: true + version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "count"} @@ -235,6 +242,7 @@ setup: body: query: 'from test | sort time | limit 2 | keep count | eval x = count + 1' columnar: true + version: 2024.04.01 - length: {columns: 2} - match: {columns.0.name: "count"} @@ -252,6 +260,7 @@ setup: body: query: 'from test | sort time | limit 2 | keep count | eval x = count + 1 | keep x' columnar: true + version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "x"} @@ -265,6 +274,7 @@ setup: esql.query: body: query: 'from test | limit 10 | sort time | limit 1' + version: 2024.04.01 - length: {columns: 6} - length: {values: 1} @@ -278,6 +288,7 @@ setup: body: query: 'row a = ? | eval b = ?, c = 1 + ?' params: ["foo", 15, 10] + version: 2024.04.01 - length: {columns: 3} - match: {columns.0.name: "a"} @@ -297,6 +308,7 @@ setup: body: query: 'from test | where color == ? and count == ? and time == ? | keep data, count, color' params: ["green", 44, 1674835275193] + version: 2024.04.01 - length: {columns: 3} - match: {columns.0.name: "data"} @@ -315,6 +327,7 @@ setup: body: query: 'from test | eval x = ?, y = ?, z = ?, t = ?, u = ?, v = ? 
| keep x, y, z, t, u, v | limit 3' params: [{"value": 1, "type": "keyword"}, {"value": 2, "type": "double"}, null, true, 123, {"value": 123, "type": "long"}] + version: 2024.04.01 - length: {columns: 6} - match: {columns.0.name: "x"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml index a18dbba1abfab..f6271ab02b816 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml @@ -122,6 +122,7 @@ row wise and keep null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: false + version: 2024.04.01 - length: {columns: 8} - match: {columns.0.name: "always_null"} @@ -153,6 +154,7 @@ row wise and drop null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: false + version: 2024.04.01 - length: {all_columns: 8} - match: {all_columns.0.name: "always_null"} @@ -196,6 +198,7 @@ columnar and keep null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: true + version: 2024.04.01 - length: {columns: 8} - match: {columns.0.name: "always_null"} @@ -227,6 +230,7 @@ columnar and drop null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: true + version: 2024.04.01 - length: {all_columns: 8} - match: {all_columns.0.name: "always_null"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml index b40564cdac1de..e505d11cbe137 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml @@ -47,6 +47,7 @@ setup: esql.query: body: query: 'FROM test | where keyword =~ keywordUpper | keep id, keyword, keywordUpper' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -66,6 +67,7 @@ setup: esql.query: body: query: 'FROM test | where text =~ textCamel | keep id, text, textCamel' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -86,6 +88,7 @@ setup: esql.query: body: query: 'FROM test | where keyword =~ text | keep id, keyword, text' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -106,6 +109,7 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper =~ textCamel | keep id, keywordUpper, textCamel | sort id' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -127,6 +131,7 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper =~ "fo*" | keep id, keywordUpper' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -141,6 +146,7 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "foo*" | keep id, wildcard' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -156,6 +162,7 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "fOo*" | keep id, wildcard' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -172,6 +179,7 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper 
=~ "fo?" | keep id, keywordUpper' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -186,6 +194,7 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "bar?" | keep id, wildcard' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -201,6 +210,7 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "bAr?" | keep id, wildcard' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -219,6 +229,7 @@ setup: esql.query: body: query: 'FROM test | where text =~ "Fo*" | keep id, text | sort id' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -233,6 +244,7 @@ setup: esql.query: body: query: 'FROM test | where wildcardText =~ "fOo*" | keep id, wildcardText' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -248,6 +260,7 @@ setup: esql.query: body: query: 'FROM test | where wildcardText =~ "bAr?" | keep id, wildcardText' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -266,6 +279,7 @@ setup: esql.query: body: query: 'FROM test | where text =~ "fo\\*" | keep id, text' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -283,6 +297,7 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -302,6 +317,7 @@ setup: esql.query: body: query: 'FROM test | where NOT wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' + version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml index 17034de677b8d..ec415cbfa12d9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml @@ -130,6 +130,7 @@ avg 8.14 or after: query: 'FROM test | STATS AVG(data) | LIMIT 1' columnar: true profile: true + version: 2024.04.01 - match: {columns.0.name: "AVG(data)"} - match: {columns.0.type: "double"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml index 053d33ee9bf43..2274d5973087d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml @@ -97,6 +97,7 @@ geo_point: esql.query: body: query: 'from geo_points | sort id' + version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: location } @@ -114,6 +115,7 @@ geo_point unsortable: esql.query: body: query: 'from geo_points | sort location' + version: 2024.04.01 --- geo_point unsortable with limit: @@ -122,6 +124,7 @@ geo_point unsortable with limit: esql.query: body: query: 'from geo_points | LIMIT 10 | sort location' + version: 2024.04.01 --- geo_point unsortable with limit from row: @@ -130,6 +133,7 @@ geo_point unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | 
EVAL pt = TO_GEOPOINT(wkt) | limit 5 | sort pt' + version: 2024.04.01 --- values unsupported for geo_point: @@ -138,6 +142,7 @@ values unsupported for geo_point: esql.query: body: query: 'FROM geo_points | STATS VALUES(location)' + version: 2024.04.01 --- cartesian_point: @@ -147,6 +152,7 @@ cartesian_point: esql.query: body: query: 'from cartesian_points | sort id' + version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: location } @@ -164,6 +170,7 @@ cartesian_point unsortable: esql.query: body: query: 'from cartesian_points | sort location' + version: 2024.04.01 --- cartesian_point unsortable with limit: @@ -172,6 +179,7 @@ cartesian_point unsortable with limit: esql.query: body: query: 'from cartesian_points | LIMIT 10 | sort location' + version: 2024.04.01 --- cartesian_point unsortable with limit from row: @@ -180,6 +188,7 @@ cartesian_point unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) | limit 5 | sort pt' + version: 2024.04.01 --- geo_shape: @@ -189,6 +198,7 @@ geo_shape: esql.query: body: query: 'from geo_shapes | sort id' + version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: shape } @@ -206,6 +216,7 @@ geo_shape unsortable: esql.query: body: query: 'from geo_shapes | sort shape' + version: 2024.04.01 --- geo_shape unsortable with limit: @@ -214,6 +225,7 @@ geo_shape unsortable with limit: esql.query: body: query: 'from geo_shapes | LIMIT 10 | sort shape' + version: 2024.04.01 --- geo_shape unsortable with limit from row: @@ -222,6 +234,7 @@ geo_shape unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL shape = TO_GEOSHAPE(wkt) | limit 5 | sort shape' + version: 2024.04.01 --- cartesian_shape: @@ -231,6 +244,7 @@ cartesian_shape: esql.query: body: query: 'from cartesian_shapes | sort id' + version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: shape } @@ -248,6 +262,7 @@ cartesian_shape unsortable: esql.query: body: query: 'from cartesian_shapes | sort shape' + version: 2024.04.01 --- cartesian_shape unsortable with limit: @@ -256,6 +271,7 @@ cartesian_shape unsortable with limit: esql.query: body: query: 'from cartesian_shapes | LIMIT 10 | sort shape' + version: 2024.04.01 --- cartesian_shape unsortable with limit from row: @@ -264,3 +280,4 @@ cartesian_shape unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL shape = TO_CARTESIANSHAPE(wkt) | limit 5 | sort shape' + version: 2024.04.01 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml index 672dfa1503c40..69a9213980f98 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml @@ -120,6 +120,7 @@ setup: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.0.type: "keyword"} @@ -146,6 +147,7 @@ setup: body: query: 'from test | where color == "red" | stats avg(data) by color' columnar: true + version: 2024.04.01 - match: {columns.0.name: "avg(data)"} - 
match: {columns.0.type: "double"} @@ -162,6 +164,7 @@ setup: body: query: 'from test | stats avg(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "avg(count)"} - match: {columns.0.type: "double"} @@ -176,6 +179,7 @@ setup: body: query: 'from test | stats f1 = avg(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "f1"} - match: {columns.0.type: "double"} @@ -190,6 +194,7 @@ setup: body: query: 'from test | stats count(data)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "count(data)"} - match: {columns.0.type: "long"} @@ -204,6 +209,7 @@ setup: body: query: 'from test | stats dataCount = count(data)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "dataCount"} - match: {columns.0.type: "long"} @@ -218,6 +224,7 @@ setup: body: query: 'from test | stats min(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "min(count)"} - match: {columns.0.type: "long"} @@ -232,6 +239,7 @@ setup: body: query: 'from test | stats minCount=min(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "minCount"} - match: {columns.0.type: "long"} @@ -246,6 +254,7 @@ setup: body: query: 'from test | stats max(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "max(count)"} - match: {columns.0.type: "long"} @@ -260,6 +269,7 @@ setup: body: query: 'from test | stats maxCount=max(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "maxCount"} - match: {columns.0.type: "long"} @@ -272,6 +282,7 @@ setup: body: query: 'from test | stats avg(count) by color | sort color | limit 2' columnar: true + version: 2024.04.01 - match: {columns.0.name: "avg(count)"} - match: {columns.0.type: "double"} @@ -289,6 +300,7 @@ setup: body: query: 'from test | stats med=median(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -303,6 +315,7 @@ setup: body: query: 'from test | stats med=median(count_d)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -317,6 +330,7 @@ setup: body: query: 'from test | stats med=median(count) by color | sort med' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -334,6 +348,7 @@ setup: body: query: 'from test | stats med=median(count_d) by color | sort med' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -351,6 +366,7 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -365,6 +381,7 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count_d)' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -379,6 +396,7 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count) by color | sort color' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -396,6 +414,7 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count_d) by color | sort color' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -412,6 +431,7 @@ setup: esql.query: body: query: 'from test | stats avg_count = avg(count) | eval x = avg_count + 7' + version: 2024.04.01 - length: {values: 1} - length: 
{values.0: 2} @@ -425,6 +445,7 @@ setup: esql.query: body: query: 'from test | stats x = avg(count) | where x > 100' + version: 2024.04.01 - length: {values: 0} @@ -434,6 +455,7 @@ setup: esql.query: body: query: 'from test | eval nullsum = count_d + null | sort nullsum | limit 1' + version: 2024.04.01 - length: {columns: 8} - length: {values: 1} @@ -449,6 +471,7 @@ setup: esql.query: body: query: 'row a = 1, b = 2, c = null | eval z = c + b + a' + version: 2024.04.01 - length: {columns: 4} - length: {values: 1} @@ -474,6 +497,7 @@ setup: esql.query: body: query: 'from test | eval nullsum = count_d + null | stats count(nullsum)' + version: 2024.04.01 - length: {columns: 1} - length: {values: 1} @@ -490,6 +514,7 @@ setup: esql.query: body: query: 'row l=1, d=1.0, ln=1 + null, dn=1.0 + null | stats sum(l), sum(d), sum(ln), sum(dn)' + version: 2024.04.01 - length: {columns: 4} - length: {values: 1} @@ -516,6 +541,7 @@ grouping on text: body: query: 'FROM test | STATS med=median(count) BY text | SORT med' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml index 0684939932774..1980ed8bb040c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml @@ -39,6 +39,7 @@ group on null: body: query: 'FROM test | STATS med=median(never_null) BY always_null | LIMIT 1' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "always_null"} @@ -54,6 +55,7 @@ group on null, long: body: query: 'FROM test | STATS med=median(sometimes_null) BY always_null, never_null | SORT always_null, never_null | LIMIT 10' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "always_null"} @@ -72,6 +74,7 @@ agg on null: body: query: 'FROM test | STATS med=median(always_null) | LIMIT 1' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - length: {values: 1} @@ -85,6 +88,7 @@ agg on missing: body: query: 'FROM test | STATS med=median(missing) | LIMIT 1' columnar: true + version: 2024.04.01 --- group on missing: @@ -94,6 +98,7 @@ group on missing: body: query: 'FROM test | STATS med=median(never_null) BY missing | LIMIT 1' columnar: true + version: 2024.04.01 --- agg on half missing: @@ -119,6 +124,7 @@ agg on half missing: body: query: 'FROM test* | STATS med=median(missing) | LIMIT 1' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - length: {values: 1} @@ -148,6 +154,7 @@ group on half missing: body: query: 'FROM test,test2 | STATS med=median(never_null) BY missing | LIMIT 1' columnar: true + version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "missing"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml index 9dded5d0855c6..bbf8b33445fa3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml @@ 
-35,6 +35,7 @@ constant_keyword: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: color } - match: { columns.0.type: keyword } - match: { columns.1.name: kind } @@ -49,7 +50,7 @@ constant_keyword: esql.query: body: query: 'from test | eval l=length(kind) | keep l' - + version: 2024.04.01 - match: {columns.0.name: l} - match: {columns.0.type: integer} - length: {values: 1} @@ -80,6 +81,7 @@ constant_keyword with null value: esql.query: body: query: 'from test | limit 1' + version: 2024.04.01 - match: { columns.0.name: color } - match: { columns.0.type: keyword } - match: { columns.1.name: kind } @@ -113,6 +115,7 @@ multivalued keyword: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -144,6 +147,7 @@ keyword no doc_values: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -174,6 +178,7 @@ wildcard: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -185,6 +190,7 @@ wildcard: esql.query: body: query: 'from test | eval l=length(card) | keep l' + version: 2024.04.01 - match: {columns.0.name: l} - match: {columns.0.type: integer} - length: {values: 1} @@ -225,6 +231,7 @@ numbers: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: d} - match: {columns.0.type: double} - match: {columns.1.name: i} @@ -276,6 +283,7 @@ small_numbers: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: b} - match: {columns.0.type: integer} - match: {columns.1.name: f} @@ -296,6 +304,7 @@ small_numbers: esql.query: body: query: 'from test | eval sum_d = b + f + hf + s, sum_i = b + s | keep sum_d, sum_i' + version: 2024.04.01 - match: {columns.0.name: sum_d} - match: {columns.0.type: double} - match: {columns.1.name: sum_i} @@ -310,6 +319,7 @@ small_numbers: esql.query: body: query: 'from test | eval r_f = round(f), r_hf = round(hf) | keep r_f, r_hf' + version: 2024.04.01 - match: {columns.0.name: r_f} - match: {columns.0.type: double} - match: {columns.1.name: r_hf} @@ -346,6 +356,7 @@ scaled_float: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: d} - match: {columns.0.type: double} - match: {columns.1.name: f} @@ -360,6 +371,7 @@ scaled_float: esql.query: body: query: 'from test | eval sum = d + f | keep sum' + version: 2024.04.01 - match: {columns.0.name: sum} - match: {columns.0.type: double} - length: {values: 1} @@ -390,6 +402,7 @@ multivalued boolean: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: booleans } - match: { columns.0.type: boolean } - length: { values: 1 } @@ -422,6 +435,7 @@ ip: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: ip } - match: { columns.0.type: ip } - match: { columns.1.name: keyword } @@ -436,7 +450,7 @@ ip: esql.query: body: query: 'from test | where keyword == "127.0.0.2" | rename ip as IP | drop keyword' - + version: 2024.04.01 - match: {columns.0.name: IP } - match: {columns.0.type: ip } - length: {values: 1 } @@ -492,6 +506,7 @@ alias: esql.query: body: query: 'from test | keep foo, bar, level1.level2, level2_alias, some_long, some_long_alias, some_long_alias2, some_date, some_date_alias | sort level2_alias' + version: 2024.04.01 - match: { columns.0.name: foo } - 
match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -536,6 +551,7 @@ alias: esql.query: body: query: 'from test | where bar == "abc" | keep foo, bar, level1.level2, level2_alias' + version: 2024.04.01 - match: { columns.0.name: foo } - match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -556,6 +572,7 @@ alias: esql.query: body: query: 'from test | where level2_alias == 10 | keep foo, bar, level1.level2, level2_alias' + version: 2024.04.01 - match: { columns.0.name: foo } - match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -576,6 +593,7 @@ alias: esql.query: body: query: 'from test | where level2_alias == 20' + version: 2024.04.01 - length: { values: 0 } - do: @@ -584,6 +602,7 @@ alias: esql.query: body: query: 'from test | stats x = max(level2_alias)' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: long } - length: { values: 1 } @@ -614,6 +633,7 @@ version: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: version } - match: { columns.0.type: version } - length: { values: 1 } @@ -647,6 +667,7 @@ id: esql.query: body: query: 'from test metadata _id | keep _id, kw' + version: 2024.04.01 - match: { columns.0.name: _id } - match: { columns.0.type: keyword } - length: { values: 1 } @@ -678,6 +699,7 @@ unsigned_long: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: number } - match: { columns.0.type: unsigned_long } - length: { values: 1 } @@ -701,6 +723,7 @@ _source: esql.query: body: query: 'FROM test METADATA _source | KEEP _source | LIMIT 1' + version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -736,6 +759,7 @@ _source keep all: esql.query: body: query: 'FROM test METADATA _source | LIMIT 1' + version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -772,6 +796,7 @@ _source disabled: esql.query: body: query: 'FROM test METADATA _source | KEEP _source | LIMIT 1' + version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -800,6 +825,7 @@ text: esql.query: body: query: 'FROM test | LIMIT 1' + version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: text} - length: {values: 1} @@ -831,6 +857,7 @@ synthetic _source text stored: esql.query: body: query: 'FROM test | LIMIT 1' + version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: text} - length: {values: 1} @@ -864,6 +891,7 @@ synthetic _source text with parent keyword: esql.query: body: query: 'FROM test | KEEP card.text | LIMIT 1' + version: 2024.04.01 - match: {columns.0.name: card.text} - match: {columns.0.type: text} - length: {values: 1} @@ -897,6 +925,7 @@ geo_point: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: location } - match: { columns.0.type: geo_point } - length: { values: 1 } @@ -930,6 +959,7 @@ cartesian_point: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: location } - match: { columns.0.type: cartesian_point } - length: { values: 1 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index 7549d33b4de1c..30b81860f014f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -117,6 +117,7 @@ load everything: esql.query: body: query: 'from test metadata _id' + version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -143,6 +144,7 @@ load a document: esql.query: body: query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' + version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} @@ -161,6 +163,7 @@ filter on counter: esql.query: body: query: 'from test | where k8s.pod.network.tx == 1434577921' + version: 2024.04.01 --- from doc with aggregate_metric_double: @@ -171,6 +174,7 @@ from doc with aggregate_metric_double: esql.query: body: query: 'from test2' + version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -191,6 +195,7 @@ stats on aggregate_metric_double: esql.query: body: query: 'FROM test2 | STATS max(agg_metric) BY dim' + version: 2024.04.01 --- from index pattern unsupported counter: @@ -201,6 +206,7 @@ from index pattern unsupported counter: esql.query: body: query: 'FROM test*' + version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -229,6 +235,7 @@ from index pattern explicit counter use: esql.query: body: query: 'FROM test* | keep *.tx' + version: 2024.04.01 --- @@ -249,6 +256,7 @@ _source: esql.query: body: query: 'FROM test METADATA _source | WHERE @timestamp == "2021-04-28T18:50:23.142Z" | KEEP _source | LIMIT 1' + version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index c34666bb12b02..1ff0b8763c2eb 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -120,6 +120,7 @@ unsupported: esql.query: body: query: 'from test' + version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } @@ -217,6 +218,7 @@ unsupported: esql.query: body: query: 'from test | limit 0' + version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } - match: { columns.1.name: binary } @@ -283,6 +285,7 @@ unsupported: esql.query: body: query: 'from test | keep histogram | limit 0' + version: 2024.04.01 - match: { columns.0.name: histogram } - match: { columns.0.type: unsupported } - length: { values: 0 } @@ -300,6 +303,7 @@ unsupported with sort: esql.query: body: query: 'from test | sort some_doc.bar' + version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml index 05ba568838fe4..7f78ee1c7b099 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml @@ -66,6 +66,7 @@ load everything: esql.query: body: query: 'from test' + version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -91,6 +92,7 @@ load a document: esql.query: 
body: query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' + version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} @@ -110,6 +112,7 @@ filter on counter: esql.query: body: query: 'from test | where k8s.pod.network.tx == 1434577921' + version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml index d8aad27534433..ff04eec1d1737 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml @@ -51,6 +51,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -67,6 +68,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 | limit 2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -81,6 +83,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls last | limit 1' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -95,6 +98,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -113,6 +117,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2 | limit 3' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -130,6 +135,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls first, message2 | limit 3' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -146,6 +152,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2 | limit 2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -162,6 +169,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 nulls first, message2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -182,6 +190,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 nulls first, message2 nulls first' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -202,6 +211,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls first, message2 desc nulls first' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: 
message2 } @@ -222,6 +232,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | where message1 == "foo1" | keep message1, message2 | sort message1, message2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -236,6 +247,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | where message1 == "foo1" or message2 == 2 | keep message1, message2 | sort message1, message2' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -252,6 +264,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | stats x = max(message2)' + version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: long } - length: { values: 1 } @@ -263,6 +276,7 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' + version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -338,6 +352,7 @@ same_name_different_type: esql.query: body: query: 'from test1,test2' + version: 2024.04.01 - match: { columns.0.name: message } - match: { columns.0.type: unsupported } - length: { values: 4 } @@ -389,6 +404,7 @@ same_name_different_type_same_family: esql.query: body: query: 'from test1,test2 | sort message | keep message' + version: 2024.04.01 - match: { columns.0.name: message } - match: { columns.0.type: keyword } - length: { values: 4 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 8a5d3be6758e3..8fbc4be3cfb3b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -103,6 +103,7 @@ teardown: esql.query: body: query: 'from test | enrich city_codes_policy on city_id | keep name, city, country | sort name' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -126,6 +127,7 @@ teardown: esql.query: body: query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country | sort name' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -149,6 +151,7 @@ teardown: esql.query: body: query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country_name = country | sort name' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -176,6 +179,7 @@ teardown: esql.query: body: query: 'from test | keep name, city_name | enrich city_names_policy on city_name | sort name' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 74c0e9ef1bb31..018106cf1aa11 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -52,6 +52,7 @@ setup: esql.query: body: query: 'from test | where data > 2 | sort count desc | limit 5 | stats m = max(data)' + version: 2024.04.01 - do: {xpack.usage: {}} 
- match: { esql.available: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 076bf116292d0..a9ea9c704e6e8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -115,6 +115,7 @@ teardown: esql.query: body: query: 'FROM events | eval ip_str = to_string(ip) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + version: 2024.04.01 - match: { columns.0.name: "ip" } - match: { columns.0.type: "ip" } @@ -143,6 +144,7 @@ teardown: esql.query: body: query: 'FROM events_text | ENRICH networks-policy ON ip_text | sort @timestamp | KEEP ip_text, name, department, message' + version: 2024.04.01 - match: { columns.0.name: "ip_text" } - match: { columns.0.type: "text" } @@ -170,6 +172,7 @@ teardown: esql.query: body: query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + version: 2024.04.01 --- "IP": @@ -183,6 +186,7 @@ teardown: esql.query: body: query: 'FROM events | ENRICH networks-policy ON ip | sort @timestamp | KEEP ip, name, department, message' + version: 2024.04.01 - match: { columns.0.name: "ip" } - match: { columns.0.type: "ip" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml index 19b08007fe18a..288c17bac1d16 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml @@ -44,6 +44,7 @@ esql.query: body: query: 'ROW name="engineering" | ENRICH departments-policy | LIMIT 10 | KEEP name, employees' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -58,6 +59,7 @@ esql.query: body: query: 'ROW name="sales" | ENRICH departments-policy ON name WITH department=name | WHERE name==department | KEEP name, department | LIMIT 10' + version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -257,6 +259,7 @@ movies: SORT total DESC, title ASC | KEEP total, title | LIMIT 10 + version: 2024.04.01 - match: { columns.0.name: "total" } - match: { columns.0.type: "long" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml index e181f77f2bcef..a0ec659b21d0e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml @@ -32,6 +32,7 @@ setup: esql.query: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' + version: 2024.04.01 - match: { columns.0.name: "@timestamp" } - match: { columns.0.type: "date" } @@ -54,6 +55,7 @@ setup: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' locale: "it-IT" + version: 2024.04.01 - match: { 
columns.0.name: "@timestamp" } - match: { columns.0.type: "date" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 9607b64385721..c8867b2d1bf88 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -41,6 +41,7 @@ setup: esql.query: body: query: 'from test | sort emp_no' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -66,6 +67,7 @@ setup: esql.query: body: query: 'from test | where tag == "baz" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -87,6 +89,7 @@ setup: esql.query: body: query: 'from test | where tag LIKE "*az" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -108,6 +111,7 @@ setup: esql.query: body: query: 'from test | where tag RLIKE ".*az" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -133,6 +137,7 @@ setup: esql.query: body: query: 'from test | where tag IN ("abc", "baz") | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -158,6 +163,7 @@ setup: esql.query: body: query: 'from test | where tag IN ("abc", tag) | keep emp_no, name, job, tag | sort emp_no' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -184,6 +190,7 @@ setup: esql.query: body: query: 'from test | where tag NOT IN ("abc", "baz") | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -205,6 +212,7 @@ setup: esql.query: body: query: 'from test | eval x = tag | where x == "baz" | keep emp_no, name, job, x' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -226,6 +234,7 @@ setup: esql.query: body: query: 'from test | where job == "IT Director" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -247,6 +256,7 @@ setup: esql.query: body: query: 'from test | where job LIKE "*Specialist" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -268,6 +278,7 @@ setup: esql.query: body: query: 'from test | where job RLIKE ".*Specialist" | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -290,6 +301,7 @@ setup: esql.query: body: query: 'from test | sort tag | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -313,6 +325,7 @@ setup: esql.query: body: query: 'from test | sort job | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -335,6 +348,7 @@ setup: esql.query: body: query: 'from test | sort job desc | keep emp_no, name, job, tag' + version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -358,6 +372,7 @@ setup: esql.query: body: query: 'from test | sort name | eval description = concat(name, " - ", job) | keep 
description' + version: 2024.04.01 - match: { columns.0.name: "description" } - match: { columns.0.type: "keyword" } @@ -378,6 +393,7 @@ setup: esql.query: body: query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' + version: 2024.04.01 - match: { columns.0.name: "split" } - match: { columns.0.type: "keyword" } @@ -395,6 +411,7 @@ setup: esql.query: body: query: 'from test | stats jobs = count(job) | keep jobs' + version: 2024.04.01 - match: { columns.0.name: "jobs" } - match: { columns.0.type: "long" } @@ -411,6 +428,7 @@ setup: esql.query: body: query: 'from test | stats tags = count(tag) | keep tags' + version: 2024.04.01 - match: { columns.0.name: "tags" } - match: { columns.0.type: "long" } @@ -427,6 +445,7 @@ setup: esql.query: body: query: 'from test | stats names = count(name) by job | keep names' + version: 2024.04.01 - match: { columns.0.name: "names" } - match: { columns.0.type: "long" } @@ -444,6 +463,7 @@ setup: esql.query: body: query: 'from test | stats names = count(name) by tag | keep names' + version: 2024.04.01 - match: { columns.0.name: "names" } - match: { columns.0.type: "long" } @@ -488,6 +508,7 @@ setup: esql.query: body: query: 'from test2 | sort emp_no | keep job' + version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } @@ -531,6 +552,7 @@ setup: esql.query: body: query: 'from test2 | sort emp_no | keep job' + version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } @@ -549,6 +571,7 @@ values: esql.query: body: query: 'FROM test | STATS job = VALUES(job) | EVAL job = MV_SORT(job) | LIMIT 1' + version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } - length: { values: 1 } @@ -566,6 +589,7 @@ values: esql.query: body: query: 'FROM test | STATS job = VALUES(job) BY tag | EVAL job = MV_SORT(job) | SORT tag | LIMIT 10' + version: 2024.04.01 - match: { columns.0.name: "tag" } - match: { columns.0.type: "text" } - match: { columns.1.name: "job" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml index a3be4221712fc..20dd668e0f8c3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml @@ -57,6 +57,7 @@ setup: esql.query: body: query: 'from test | sort emp_no | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -83,6 +84,7 @@ setup: esql.query: body: query: 'from test | where text_ignore_above == "this" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -106,6 +108,7 @@ setup: esql.query: body: query: 'from test | where text_ignore_above == "this is a long text" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -130,6 +133,7 @@ setup: esql.query: body: query: 'from test | where text_ignore_above is null | keep text_ignore_above, 
text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -153,6 +157,7 @@ setup: esql.query: body: query: 'from test | where text_ignore_above is not null | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -176,6 +181,7 @@ setup: esql.query: body: query: 'from test | where text_ignore_above LIKE "*long*" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -201,6 +207,7 @@ setup: esql.query: body: query: 'from test | where text_normalizer == "CamelCase" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -225,6 +232,7 @@ setup: esql.query: body: query: 'from test | where text_normalizer == text_normalizer.raw | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -242,7 +250,6 @@ setup: - length: { values: 1 } - match: { values.0: [ "this", "this", "abc", "abc", "bar", "bar" ]} - --- "sort ignore above": - do: @@ -251,6 +258,7 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -275,6 +283,7 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above desc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -299,6 +308,7 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc nulls first | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -323,6 +333,7 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc nulls last | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -341,8 +352,6 @@ setup: - match: { values.0: [ "this", "this", "abc", "abc", "bar", "bar" ]} - match: { values.1: [ "this is a long text", null, "CamelCase", "camelcase", "foo", "foo"] } - - --- "sort normalizer": - do: @@ -351,6 +360,7 @@ setup: esql.query: body: query: 'from test | sort text_normalizer asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -369,13 +379,13 @@ setup: - match: { values.0: [ "this is a 
long text", null, "CamelCase", "camelcase", "foo", "foo"] } - match: { values.1: [ "this", "this", "abc", "abc", "bar", "bar" ]} - - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: query: 'from test | sort text_normalizer desc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -394,13 +404,13 @@ setup: - match: { values.0: [ "this", "this", "abc", "abc", "bar", "bar" ]} - match: { values.1: [ "this is a long text", null, "CamelCase", "camelcase", "foo", "foo"] } - - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: query: 'from test | sort text_normalizer.raw asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -428,6 +438,7 @@ setup: esql.query: body: query: 'from test | sort non_indexed asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -452,6 +463,7 @@ setup: esql.query: body: query: 'from test | sort non_indexed desc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -476,6 +488,7 @@ setup: esql.query: body: query: 'from test | where non_indexed == "foo" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' + version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml index f69854388baf3..86ff9626e0077 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml @@ -102,6 +102,7 @@ fetch: esql.query: body: query: 'from test' + version: 2024.04.01 - length: { columns: 18 } - match: { columns.0.name: boolean } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index d7760eb42a1db..9c9ebf24bda87 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -1041,6 +1041,7 @@ public void testDisableFieldNameField() throws IOException { Request esql = new Request("POST", "_query"); esql.setJsonEntity(""" { + "version": "2024.04.01", "query": "FROM nofnf | LIMIT 1" }"""); // {"columns":[{"name":"dv","type":"keyword"},{"name":"no_dv","type":"keyword"}],"values":[["test",null]]} diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml 
b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml
index 4c0bbfd7ec139..e8cd1321db73b 100644
--- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml
+++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml
@@ -97,6 +97,7 @@ teardown:
       esql.query:
         body:
           query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT tag | LIMIT 10'
+          version: '2024.04.01'
 
   - match: {columns.0.name: "total"}
   - match: {columns.0.type: "long"}
@@ -127,6 +128,7 @@ teardown:
                        gte: "2023-01-02"
                        lte: "2023-01-03"
                        format: "yyyy-MM-dd"
+          version: '2024.04.01'
 
   - match: {columns.0.name: "_index"}
   - match: {columns.0.type: "keyword"}
@@ -198,6 +200,7 @@ teardown:
       esql.query:
         body:
           query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT total DESC | LIMIT 3 | ENRICH suggestions | KEEP tag, total, phrase'
+          version: '2024.04.01'
 
   - match: {columns.0.name: "tag"}
   - match: {columns.0.type: "keyword"}

From c2a3ec42632b0339387121efdef13f52c6c66848 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Tue, 16 Apr 2024 08:46:21 -0700
Subject: [PATCH 055/130] Leverage ordinals in enrich lookup (#107449)

This change leverages ordinals in enrich lookup. Instead of looking up
and extracting the enrich fields once per input term, it performs the
lookup and extraction only for the dictionary of distinct terms, then
applies the ordinals to map those results back to the input positions.

```
| 50th percentile | esql_stats_enrich_rates_fares | 242.949 | 34.7007 | -208.248 | ms | -85.72% |
| 90th percentile | esql_stats_enrich_rates_fares | 245.479 | 36.3419 | -209.137 | ms | -85.20% |
|100th percentile | esql_stats_enrich_rates_fares | 252.877 | 49.0826 | -203.795 | ms | -80.59% |
```
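
To illustrate the technique, here is a minimal, self-contained Java
sketch (not the actual Elasticsearch classes; the `enrichLookup`
callback and the class name are invented for this example). Rather than
running one lookup per row, the rows are deduplicated into a dictionary,
the expensive lookup runs once per distinct term, and the per-row
results are rebuilt through the ordinal indices:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class OrdinalLookupSketch {
    /** Runs the expensive per-term lookup once per distinct term instead of once per row. */
    static <T> List<T> enrichWithOrdinals(List<String> rows, Function<String, T> enrichLookup) {
        Map<String, Integer> dictIndex = new HashMap<>();
        List<String> dictionary = new ArrayList<>();
        int[] ordinals = new int[rows.size()];
        for (int i = 0; i < rows.size(); i++) {
            // Map each row to the ordinal of its term in the dictionary.
            ordinals[i] = dictIndex.computeIfAbsent(rows.get(i), term -> {
                dictionary.add(term);
                return dictionary.size() - 1;
            });
        }
        // The expensive lookup runs only once per distinct term.
        List<T> dictResults = new ArrayList<>(dictionary.size());
        for (String term : dictionary) {
            dictResults.add(enrichLookup.apply(term));
        }
        // Expand the dictionary results back to one result per input row.
        List<T> results = new ArrayList<>(rows.size());
        for (int ordinal : ordinals) {
            results.add(dictResults.get(ordinal));
        }
        return results;
    }

    public static void main(String[] args) {
        List<String> songIds = List.of("s2", "s2", "s1", "s2", "s1");
        List<String> artists = enrichWithOrdinals(songIds, id -> {
            System.out.println("lookup for " + id); // printed once per distinct id
            return id.equals("s1") ? "Eagles" : "Linkin Park";
        });
        System.out.println(artists);
    }
}
```

With many rows per distinct term, the lookup cost scales with the size
of the dictionary rather than the number of rows, which is where the
latency drop above comes from.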
---
 docs/changelog/107449.yaml                    |   5 +
 .../compute/data/OrdinalBytesRefBlock.java    |   8 ++
 .../xpack/esql/action/EnrichIT.java           |  33 ++++++
 .../enrich/EnrichResultBuilderForBoolean.java |  96 ++++++++++++----
 .../EnrichResultBuilderForBytesRef.java       | 100 ++++++++++++----
 .../enrich/EnrichResultBuilderForDouble.java  |  96 ++++++++++++----
 .../enrich/EnrichResultBuilderForInt.java     |  95 +++++++++++----
 .../enrich/EnrichResultBuilderForLong.java    |  96 ++++++++++++----
 .../esql/enrich/EnrichLookupService.java      |  15 ++-
 .../esql/enrich/EnrichResultBuilder.java      |  29 +++--
 .../esql/enrich/MergePositionsOperator.java   |  14 ++-
 .../esql/enrich/X-EnrichResultBuilder.java.st | 108 ++++++++++++++----
 .../esql/enrich/EnrichResultBuilderTests.java | 100 +++++++++++-----
 .../enrich/MergePositionsOperatorTests.java   |   8 +-
 14 files changed, 619 insertions(+), 184 deletions(-)
 create mode 100644 docs/changelog/107449.yaml

diff --git a/docs/changelog/107449.yaml b/docs/changelog/107449.yaml
new file mode 100644
index 0000000000000..7f0b1bb826e94
--- /dev/null
+++ b/docs/changelog/107449.yaml
@@ -0,0 +1,5 @@
+pr: 107449
+summary: Leverage ordinals in enrich lookup
+area: ES|QL
+type: enhancement
+issues: []
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java
index 4e409a7d214ef..64e3faca1f517 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java
@@ -55,6 +55,14 @@ public boolean isDense() {
         return ordinals.getTotalValueCount() * 2 / 3 >= bytes.getPositionCount();
     }
 
+    public IntBlock getOrdinalsBlock() {
+        return ordinals;
+    }
+
+    public BytesRefVector getDictionaryVector() {
+        return bytes;
+    }
+
     @Override
     public BytesRef getBytesRef(int valueIndex, BytesRef dest) {
         return bytes.getBytesRef(ordinals.getInt(valueIndex), dest);
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
index 43c282e9361f9..c4adfb6885267 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java
@@ -385,6 +385,39 @@ public void testForPushDownEnrichRule() {
         }
     }
 
+    /**
+     * To enable enrich lookup using ordinals
+     */
+    public void testManyDocuments() {
+        int numDocs = between(200, 2000);
+        var artists = Map.of("s1", "Eagles", "s2", "Linkin Park", "s3", "Linkin Park", "s4", "Disturbed");
+        client().admin()
+            .indices()
+            .prepareCreate("many_docs")
+            .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1))
+            .setMapping("song_id", "type=keyword")
+            .get();
+        Map<String, Long> songs = new HashMap<>();
+        for (int i = 0; i < numDocs; i++) {
+            String song = randomFrom(artists.keySet());
+            client().prepareIndex("many_docs").setSource("song_id", song).get();
+            songs.merge(song, 1L, Long::sum);
+        }
+        client().admin().indices().prepareRefresh("many_docs").get();
+        try (EsqlQueryResponse resp = run("FROM many_docs | ENRICH songs | STATS count(*) BY artist")) {
+            List<List<Object>> values = EsqlTestUtils.getValuesList(resp);
+            Map<String, Long> actual = new HashMap<>();
+            for (List<Object> value : values) {
+                actual.merge((String) value.get(1), (Long) value.get(0), Long::sum);
+            }
+            Map<String, Long> expected = new HashMap<>();
+            for (Map.Entry<String, Long> e : songs.entrySet()) {
+                expected.merge(artists.get(e.getKey()), e.getValue(), Long::sum);
+            }
+            assertThat(actual, equalTo(expected));
+        }
+    }
+
     public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin {
 
         public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception {
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java
index 0427afb6d80c8..7978baf0c5f29 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.core.Releasables;
@@ -25,9 +26,9 @@ final class EnrichResultBuilderForBoolean extends EnrichResultBuilder {
     private ObjectArray<boolean[]> cells;
 
-    EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel, int totalPositions) {
-        super(blockFactory, channel, totalPositions);
-        this.cells = blockFactory.bigArrays().newObjectArray(totalPositions);
+    EnrichResultBuilderForBoolean(BlockFactory blockFactory, int channel) {
+
super(blockFactory, channel); + this.cells = blockFactory.bigArrays().newObjectArray(1); } @Override @@ -39,6 +40,7 @@ void addInputPage(IntVector positions, Page page) { continue; } int cellPosition = positions.getInt(i); + cells = blockFactory.bigArrays().grow(cells, cellPosition + 1); final var oldCell = cells.get(cellPosition); final var newCell = extendCell(oldCell, valueCount); cells.set(cellPosition, newCell); @@ -59,30 +61,82 @@ private boolean[] extendCell(boolean[] oldCell, int newValueCount) { } } - @Override - Block build() { - try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(totalPositions)) { - for (int i = 0; i < totalPositions; i++) { - final var cell = cells.get(i); - if (cell == null) { - builder.appendNull(); - continue; - } - if (cell.length > 1) { - builder.beginPositionEntry(); - } - // TODO: sort and dedup - for (var v : cell) { - builder.appendBoolean(v); - } - if (cell.length > 1) { - builder.endPositionEntry(); + private boolean[] combineCell(boolean[] first, boolean[] second) { + if (first == null) { + return second; + } + if (second == null) { + return first; + } + var result = new boolean[first.length + second.length]; + System.arraycopy(first, 0, result, 0, first.length); + System.arraycopy(second, 0, result, first.length, second.length); + return result; + } + + private void appendGroupToBlockBuilder(BooleanBlock.Builder builder, boolean[] group) { + if (group == null) { + builder.appendNull(); + } else if (group.length == 1) { + builder.appendBoolean(group[0]); + } else { + builder.beginPositionEntry(); + // TODO: sort and dedup and set MvOrdering + for (var v : group) { + builder.appendBoolean(v); + } + builder.endPositionEntry(); + } + } + + private boolean[] getCellOrNull(int position) { + return position < cells.size() ? 
cells.get(position) : null;
+    }
+
+    private Block buildWithSelected(IntBlock selected) {
+        try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                int selectedCount = selected.getValueCount(i);
+                switch (selectedCount) {
+                    case 0 -> builder.appendNull();
+                    case 1 -> {
+                        int groupId = selected.getInt(selected.getFirstValueIndex(i));
+                        appendGroupToBlockBuilder(builder, getCellOrNull(groupId));
+                    }
+                    default -> {
+                        int firstValueIndex = selected.getFirstValueIndex(i);
+                        var cell = getCellOrNull(selected.getInt(firstValueIndex));
+                        for (int p = 1; p < selectedCount; p++) {
+                            int groupId = selected.getInt(firstValueIndex + p);
+                            cell = combineCell(cell, getCellOrNull(groupId));
+                        }
+                        appendGroupToBlockBuilder(builder, cell);
+                    }
                 }
             }
             return builder.build();
         }
     }
 
+    private Block buildWithSelected(IntVector selected) {
+        try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i)));
+            }
+            return builder.build();
+        }
+    }
+
+    @Override
+    Block build(IntBlock selected) {
+        var vector = selected.asVector();
+        if (vector != null) {
+            return buildWithSelected(vector);
+        } else {
+            return buildWithSelected(selected);
+        }
+    }
+
     @Override
     public void close() {
         Releasables.close(cells, super::close);
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java
index ff881da5baf44..28326568af63b 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBytesRef.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BytesRefBlock;
+import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.core.Releasables;
@@ -26,14 +27,15 @@
  */
 final class EnrichResultBuilderForBytesRef extends EnrichResultBuilder {
     private final BytesRefArray bytes; // shared between all cells
+    private BytesRef scratch = new BytesRef();
     private ObjectArray<int[]> cells;
 
-    EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel, int totalPositions) {
-        super(blockFactory, channel, totalPositions);
-        this.cells = blockFactory.bigArrays().newObjectArray(totalPositions);
+    EnrichResultBuilderForBytesRef(BlockFactory blockFactory, int channel) {
+        super(blockFactory, channel);
+        this.cells = blockFactory.bigArrays().newObjectArray(1);
         BytesRefArray bytes = null;
         try {
-            bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays());
+            bytes = new BytesRefArray(1L, blockFactory.bigArrays());
             this.bytes = bytes;
         } finally {
             if (bytes == null) {
@@ -52,6 +54,7 @@ void addInputPage(IntVector positions, Page page) {
             continue;
         }
         int cellPosition = positions.getInt(i);
+        cells = blockFactory.bigArrays().grow(cells, cellPosition + 1);
         final var oldCell = cells.get(cellPosition);
         final var newCell = extendCell(oldCell, valueCount);
         cells.set(cellPosition, newCell);
@@ -75,31 +78,82 @@ private int[]
extendCell(int[] oldCell, int newValueCount) { } } - @Override - Block build() { - try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(totalPositions)) { - BytesRef scratch = new BytesRef(); - for (int i = 0; i < totalPositions; i++) { - final var cell = cells.get(i); - if (cell == null) { - builder.appendNull(); - continue; - } - if (cell.length > 1) { - builder.beginPositionEntry(); - } - // TODO: sort and dedup - for (var v : cell) { - builder.appendBytesRef(bytes.get(v, scratch)); - } - if (cell.length > 1) { - builder.endPositionEntry(); + private int[] combineCell(int[] first, int[] second) { + if (first == null) { + return second; + } + if (second == null) { + return first; + } + var result = new int[first.length + second.length]; + System.arraycopy(first, 0, result, 0, first.length); + System.arraycopy(second, 0, result, first.length, second.length); + return result; + } + + private void appendGroupToBlockBuilder(BytesRefBlock.Builder builder, int[] group) { + if (group == null) { + builder.appendNull(); + } else if (group.length == 1) { + builder.appendBytesRef(bytes.get(group[0], scratch)); + } else { + builder.beginPositionEntry(); + // TODO: sort and dedup and set MvOrdering + for (var v : group) { + builder.appendBytesRef(bytes.get(v, scratch)); + } + builder.endPositionEntry(); + } + } + + private int[] getCellOrNull(int position) { + return position < cells.size() ? cells.get(position) : null; + } + + private Block buildWithSelected(IntBlock selected) { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int selectedCount = selected.getValueCount(i); + switch (selectedCount) { + case 0 -> builder.appendNull(); + case 1 -> { + int groupId = selected.getInt(selected.getFirstValueIndex(i)); + appendGroupToBlockBuilder(builder, getCellOrNull(groupId)); + } + default -> { + int firstValueIndex = selected.getFirstValueIndex(i); + var cell = getCellOrNull(selected.getInt(firstValueIndex)); + for (int p = 1; p < selectedCount; p++) { + int groupId = selected.getInt(firstValueIndex + p); + cell = combineCell(cell, getCellOrNull(groupId)); + } + appendGroupToBlockBuilder(builder, cell); + } } } return builder.build(); } } + private Block buildWithSelected(IntVector selected) { + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i))); + } + return builder.build(); + } + } + + @Override + Block build(IntBlock selected) { + var vector = selected.asVector(); + if (vector != null) { + return buildWithSelected(vector); + } else { + return buildWithSelected(selected); + } + } + @Override public void close() { Releasables.close(bytes, cells, super::close); diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java index 93c178d816326..e15b8f7d6d4b8 100644 --- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForDouble.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; 
 import org.elasticsearch.compute.data.DoubleBlock;
+import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.core.Releasables;
@@ -25,9 +26,9 @@ final class EnrichResultBuilderForDouble extends EnrichResultBuilder {
     private ObjectArray<double[]> cells;
 
-    EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel, int totalPositions) {
-        super(blockFactory, channel, totalPositions);
-        this.cells = blockFactory.bigArrays().newObjectArray(totalPositions);
+    EnrichResultBuilderForDouble(BlockFactory blockFactory, int channel) {
+        super(blockFactory, channel);
+        this.cells = blockFactory.bigArrays().newObjectArray(1);
     }
 
     @Override
@@ -39,6 +40,7 @@ void addInputPage(IntVector positions, Page page) {
                 continue;
             }
             int cellPosition = positions.getInt(i);
+            cells = blockFactory.bigArrays().grow(cells, cellPosition + 1);
             final var oldCell = cells.get(cellPosition);
             final var newCell = extendCell(oldCell, valueCount);
             cells.set(cellPosition, newCell);
@@ -59,30 +61,82 @@ private double[] extendCell(double[] oldCell, int newValueCount) {
         }
     }
 
-    @Override
-    Block build() {
-        try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(totalPositions)) {
-            for (int i = 0; i < totalPositions; i++) {
-                final var cell = cells.get(i);
-                if (cell == null) {
-                    builder.appendNull();
-                    continue;
-                }
-                if (cell.length > 1) {
-                    builder.beginPositionEntry();
-                }
-                // TODO: sort and dedup
-                for (var v : cell) {
-                    builder.appendDouble(v);
-                }
-                if (cell.length > 1) {
-                    builder.endPositionEntry();
+    private double[] combineCell(double[] first, double[] second) {
+        if (first == null) {
+            return second;
+        }
+        if (second == null) {
+            return first;
+        }
+        var result = new double[first.length + second.length];
+        System.arraycopy(first, 0, result, 0, first.length);
+        System.arraycopy(second, 0, result, first.length, second.length);
+        return result;
+    }
+
+    private void appendGroupToBlockBuilder(DoubleBlock.Builder builder, double[] group) {
+        if (group == null) {
+            builder.appendNull();
+        } else if (group.length == 1) {
+            builder.appendDouble(group[0]);
+        } else {
+            builder.beginPositionEntry();
+            // TODO: sort and dedup and set MvOrdering
+            for (var v : group) {
+                builder.appendDouble(v);
+            }
+            builder.endPositionEntry();
+        }
+    }
+
+    private double[] getCellOrNull(int position) {
+        return position < cells.size() ? cells.get(position) : null;
+    }
+
+    private Block buildWithSelected(IntBlock selected) {
+        try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                int selectedCount = selected.getValueCount(i);
+                switch (selectedCount) {
+                    case 0 -> builder.appendNull();
+                    case 1 -> {
+                        int groupId = selected.getInt(selected.getFirstValueIndex(i));
+                        appendGroupToBlockBuilder(builder, getCellOrNull(groupId));
+                    }
+                    default -> {
+                        int firstValueIndex = selected.getFirstValueIndex(i);
+                        var cell = getCellOrNull(selected.getInt(firstValueIndex));
+                        for (int p = 1; p < selectedCount; p++) {
+                            int groupId = selected.getInt(firstValueIndex + p);
+                            cell = combineCell(cell, getCellOrNull(groupId));
+                        }
+                        appendGroupToBlockBuilder(builder, cell);
+                    }
                 }
             }
             return builder.build();
         }
     }
 
+    private Block buildWithSelected(IntVector selected) {
+        try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i)));
+            }
+            return builder.build();
+        }
+    }
+
+    @Override
+    Block build(IntBlock selected) {
+        var vector = selected.asVector();
+        if (vector != null) {
+            return buildWithSelected(vector);
+        } else {
+            return buildWithSelected(selected);
+        }
+    }
+
     @Override
     public void close() {
         Releasables.close(cells, super::close);
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java
index 4dec877e0d1e4..223a8eb88f0b1 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForInt.java
@@ -25,9 +25,9 @@ final class EnrichResultBuilderForInt extends EnrichResultBuilder {
     private ObjectArray<int[]> cells;
 
-    EnrichResultBuilderForInt(BlockFactory blockFactory, int channel, int totalPositions) {
-        super(blockFactory, channel, totalPositions);
-        this.cells = blockFactory.bigArrays().newObjectArray(totalPositions);
+    EnrichResultBuilderForInt(BlockFactory blockFactory, int channel) {
+        super(blockFactory, channel);
+        this.cells = blockFactory.bigArrays().newObjectArray(1);
     }
 
     @Override
@@ -39,6 +39,7 @@ void addInputPage(IntVector positions, Page page) {
             continue;
         }
         int cellPosition = positions.getInt(i);
+        cells = blockFactory.bigArrays().grow(cells, cellPosition + 1);
         final var oldCell = cells.get(cellPosition);
         final var newCell = extendCell(oldCell, valueCount);
         cells.set(cellPosition, newCell);
@@ -59,30 +60,82 @@ private int[] extendCell(int[] oldCell, int newValueCount) {
         }
     }
 
-    @Override
-    Block build() {
-        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(totalPositions)) {
-            for (int i = 0; i < totalPositions; i++) {
-                final var cell = cells.get(i);
-                if (cell == null) {
-                    builder.appendNull();
-                    continue;
-                }
-                if (cell.length > 1) {
-                    builder.beginPositionEntry();
-                }
-                // TODO: sort and dedup
-                for (var v : cell) {
-                    builder.appendInt(v);
-                }
-                if (cell.length > 1) {
-                    builder.endPositionEntry();
+    private int[] combineCell(int[] first, int[] second) {
+        if (first == null) {
+            return second;
+        }
+        if (second == null) {
+            return first;
+        }
+        var result = new int[first.length + second.length];
+        System.arraycopy(first, 0, result, 0, first.length);
+        System.arraycopy(second, 0, result, first.length, second.length);
+        return result;
+    }
+
+    private void appendGroupToBlockBuilder(IntBlock.Builder builder, int[] group) {
+        if (group == null) {
+            builder.appendNull();
+        } else if (group.length == 1) {
+            builder.appendInt(group[0]);
+        } else {
+            builder.beginPositionEntry();
+            // TODO: sort and dedup and set MvOrdering
+            for (var v : group) {
+                builder.appendInt(v);
+            }
+            builder.endPositionEntry();
+        }
+    }
+
+    private int[] getCellOrNull(int position) {
+        return position < cells.size() ? cells.get(position) : null;
+    }
+
+    private Block buildWithSelected(IntBlock selected) {
+        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                int selectedCount = selected.getValueCount(i);
+                switch (selectedCount) {
+                    case 0 -> builder.appendNull();
+                    case 1 -> {
+                        int groupId = selected.getInt(selected.getFirstValueIndex(i));
+                        appendGroupToBlockBuilder(builder, getCellOrNull(groupId));
+                    }
+                    default -> {
+                        int firstValueIndex = selected.getFirstValueIndex(i);
+                        var cell = getCellOrNull(selected.getInt(firstValueIndex));
+                        for (int p = 1; p < selectedCount; p++) {
+                            int groupId = selected.getInt(firstValueIndex + p);
+                            cell = combineCell(cell, getCellOrNull(groupId));
+                        }
+                        appendGroupToBlockBuilder(builder, cell);
+                    }
                 }
             }
             return builder.build();
         }
     }
 
+    private Block buildWithSelected(IntVector selected) {
+        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) {
+            for (int i = 0; i < selected.getPositionCount(); i++) {
+                appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i)));
+            }
+            return builder.build();
+        }
+    }
+
+    @Override
+    Block build(IntBlock selected) {
+        var vector = selected.asVector();
+        if (vector != null) {
+            return buildWithSelected(vector);
+        } else {
+            return buildWithSelected(selected);
+        }
+    }
+
     @Override
     public void close() {
         Releasables.close(cells, super::close);
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java
index 0dd4d1d0a8a0d..674b2e01c5703 100644
--- a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForLong.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -25,9 +26,9 @@ final class EnrichResultBuilderForLong extends EnrichResultBuilder {
     private ObjectArray<long[]> cells;
 
-    EnrichResultBuilderForLong(BlockFactory blockFactory, int channel, int totalPositions) {
-        super(blockFactory, channel, totalPositions);
-        this.cells = blockFactory.bigArrays().newObjectArray(totalPositions);
+    EnrichResultBuilderForLong(BlockFactory blockFactory, int channel) {
+        super(blockFactory, channel);
+        this.cells = blockFactory.bigArrays().newObjectArray(1);
     }
 
     @Override
@@ -39,6 +40,7 @@ void addInputPage(IntVector positions, Page page) {
             continue;
         }
         int cellPosition = positions.getInt(i);
+
cells = blockFactory.bigArrays().grow(cells, cellPosition + 1); final var oldCell = cells.get(cellPosition); final var newCell = extendCell(oldCell, valueCount); cells.set(cellPosition, newCell); @@ -59,30 +61,82 @@ private long[] extendCell(long[] oldCell, int newValueCount) { } } - @Override - Block build() { - try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(totalPositions)) { - for (int i = 0; i < totalPositions; i++) { - final var cell = cells.get(i); - if (cell == null) { - builder.appendNull(); - continue; - } - if (cell.length > 1) { - builder.beginPositionEntry(); - } - // TODO: sort and dedup - for (var v : cell) { - builder.appendLong(v); - } - if (cell.length > 1) { - builder.endPositionEntry(); + private long[] combineCell(long[] first, long[] second) { + if (first == null) { + return second; + } + if (second == null) { + return first; + } + var result = new long[first.length + second.length]; + System.arraycopy(first, 0, result, 0, first.length); + System.arraycopy(second, 0, result, first.length, second.length); + return result; + } + + private void appendGroupToBlockBuilder(LongBlock.Builder builder, long[] group) { + if (group == null) { + builder.appendNull(); + } else if (group.length == 1) { + builder.appendLong(group[0]); + } else { + builder.beginPositionEntry(); + // TODO: sort and dedup and set MvOrdering + for (var v : group) { + builder.appendLong(v); + } + builder.endPositionEntry(); + } + } + + private long[] getCellOrNull(int position) { + return position < cells.size() ? cells.get(position) : null; + } + + private Block buildWithSelected(IntBlock selected) { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int selectedCount = selected.getValueCount(i); + switch (selectedCount) { + case 0 -> builder.appendNull(); + case 1 -> { + int groupId = selected.getInt(selected.getFirstValueIndex(i)); + appendGroupToBlockBuilder(builder, getCellOrNull(groupId)); + } + default -> { + int firstValueIndex = selected.getFirstValueIndex(i); + var cell = getCellOrNull(selected.getInt(firstValueIndex)); + for (int p = 1; p < selectedCount; p++) { + int groupId = selected.getInt(firstValueIndex + p); + cell = combineCell(cell, getCellOrNull(groupId)); + } + appendGroupToBlockBuilder(builder, cell); + } } } return builder.build(); } } + private Block buildWithSelected(IntVector selected) { + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i))); + } + return builder.build(); + } + } + + @Override + Block build(IntBlock selected) { + var vector = selected.asVector(); + if (vector != null) { + return buildWithSelected(vector); + } else { + return buildWithSelected(selected); + } + } + @Override public void close() { Releasables.close(cells, super::close); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 366fb4ff55ba6..17d189626d4e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -30,7 +30,10 @@ import org.elasticsearch.compute.data.BlockFactory; import 
org.elasticsearch.compute.data.BlockStreamInput;
 import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.LocalCircuitBreaker;
+import org.elasticsearch.compute.data.OrdinalBytesRefBlock;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.Driver;
@@ -246,6 +249,14 @@ private void doLookup(
         ActionListener<Page> listener
     ) {
         Block inputBlock = inputPage.getBlock(0);
+        final IntBlock selectedPositions;
+        if (inputBlock instanceof OrdinalBytesRefBlock ordinalBytesRefBlock) {
+            inputBlock = ordinalBytesRefBlock.getDictionaryVector().asBlock();
+            selectedPositions = ordinalBytesRefBlock.getOrdinalsBlock();
+            selectedPositions.mustIncRef();
+        } else {
+            selectedPositions = IntVector.range(0, inputBlock.getPositionCount(), blockFactory).asBlock();
+        }
         LocalCircuitBreaker localBreaker = null;
         try {
             if (inputBlock.areAllValuesNull()) {
@@ -321,7 +332,7 @@ private void doLookup(
             // merging field-values by position
             final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray();
             intermediateOperators.add(
-                new MergePositionsOperator(inputPage.getPositionCount(), 1, mergingChannels, mergingTypes, driverContext.blockFactory())
+                new MergePositionsOperator(1, mergingChannels, mergingTypes, selectedPositions, driverContext.blockFactory())
             );
             AtomicReference<Page> result = new AtomicReference<>();
             OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set);
@@ -362,7 +373,7 @@ private void doLookup(
         } catch (Exception e) {
             listener.onFailure(e);
         } finally {
-            Releasables.close(localBreaker);
+            Releasables.close(selectedPositions, localBreaker);
         }
     }

diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java
index 5bb42f3090695..062abb1917d84 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilder.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.core.Releasable;
@@ -21,13 +22,11 @@ abstract class EnrichResultBuilder implements Releasable {
     protected final BlockFactory blockFactory;
     protected final int channel;
-    protected final int totalPositions;
     private long usedBytes;

-    EnrichResultBuilder(BlockFactory blockFactory, int channel, int totalPositions) {
+    EnrichResultBuilder(BlockFactory blockFactory, int channel) {
         this.blockFactory = blockFactory;
         this.channel = channel;
-        this.totalPositions = totalPositions;
     }

     /**
@@ -38,7 +37,7 @@ abstract class EnrichResultBuilder implements Releasable {
      */
     abstract void addInputPage(IntVector positions, Page page);

-    abstract Block build();
+    abstract Block build(IntBlock selected);

     final void adjustBreaker(long bytes) {
         blockFactory.breaker().addEstimateBytesAndMaybeBreak(bytes, "<>");
@@ -50,21 +49,21 @@ public void close() {
         blockFactory.breaker().addWithoutBreaking(-usedBytes);
     }

-    static EnrichResultBuilder 
enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel, int totalPositions) { + static EnrichResultBuilder enrichResultBuilder(ElementType elementType, BlockFactory blockFactory, int channel) { return switch (elementType) { - case NULL -> new EnrichResultBuilderForNull(blockFactory, channel, totalPositions); - case INT -> new EnrichResultBuilderForInt(blockFactory, channel, totalPositions); - case LONG -> new EnrichResultBuilderForLong(blockFactory, channel, totalPositions); - case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel, totalPositions); - case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel, totalPositions); - case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel, totalPositions); + case NULL -> new EnrichResultBuilderForNull(blockFactory, channel); + case INT -> new EnrichResultBuilderForInt(blockFactory, channel); + case LONG -> new EnrichResultBuilderForLong(blockFactory, channel); + case DOUBLE -> new EnrichResultBuilderForDouble(blockFactory, channel); + case BOOLEAN -> new EnrichResultBuilderForBoolean(blockFactory, channel); + case BYTES_REF -> new EnrichResultBuilderForBytesRef(blockFactory, channel); default -> throw new IllegalArgumentException("no enrich result builder for [" + elementType + "]"); }; } private static class EnrichResultBuilderForNull extends EnrichResultBuilder { - EnrichResultBuilderForNull(BlockFactory blockFactory, int channel, int totalPositions) { - super(blockFactory, channel, totalPositions); + EnrichResultBuilderForNull(BlockFactory blockFactory, int channel) { + super(blockFactory, channel); } @Override @@ -73,8 +72,8 @@ void addInputPage(IntVector positions, Page page) { } @Override - Block build() { - return blockFactory.newConstantNullBlock(totalPositions); + Block build(IntBlock selected) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java index a3b7a8be61e2c..3e1f46100c4f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperator.java @@ -46,16 +46,16 @@ final class MergePositionsOperator implements Operator { private boolean finished = false; private final int positionChannel; - private final EnrichResultBuilder[] builders; + private final IntBlock selectedPositions; private Page outputPage; MergePositionsOperator( - int positionCount, int positionChannel, int[] mergingChannels, ElementType[] mergingTypes, + IntBlock selectedPositions, BlockFactory blockFactory ) { if (mergingChannels.length != mergingTypes.length) { @@ -70,13 +70,15 @@ final class MergePositionsOperator implements Operator { this.builders = new EnrichResultBuilder[mergingTypes.length]; try { for (int i = 0; i < mergingTypes.length; i++) { - builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i], positionCount); + builders[i] = EnrichResultBuilder.enrichResultBuilder(mergingTypes[i], blockFactory, mergingChannels[i]); } } finally { if (builders[builders.length - 1] == null) { - Releasables.close(builders); + Releasables.close(Releasables.wrap(builders)); } } + selectedPositions.mustIncRef(); + this.selectedPositions = selectedPositions; } @Override @@ -102,7 
+104,7 @@ public void finish() { final Block[] blocks = new Block[builders.length]; try { for (int i = 0; i < builders.length; i++) { - blocks[i] = builders[i].build(); + blocks[i] = builders[i].build(selectedPositions); } outputPage = new Page(blocks); } finally { @@ -127,7 +129,7 @@ public Page getOutput() { @Override public void close() { - Releasables.close(Releasables.wrap(builders), () -> { + Releasables.close(Releasables.wrap(builders), selectedPositions, () -> { if (outputPage != null) { outputPage.releaseBlocks(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st index 4c5c9fabfa797..7066b8b8f12a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/X-EnrichResultBuilder.java.st @@ -18,10 +18,15 @@ import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; $if(long)$ +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.$Type$Block; +$elseif(int)$ +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; $else$ import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; $endif$ import org.elasticsearch.compute.data.Page; @@ -36,16 +41,17 @@ import java.util.Arrays; final class EnrichResultBuilderFor$Type$ extends EnrichResultBuilder { $if(BytesRef)$ private final BytesRefArray bytes; // shared between all cells + private BytesRef scratch = new BytesRef(); $endif$ private ObjectArray<$if(BytesRef)$int$else$$type$$endif$[]> cells; - EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel, int totalPositions) { - super(blockFactory, channel, totalPositions); - this.cells = blockFactory.bigArrays().newObjectArray(totalPositions); + EnrichResultBuilderFor$Type$(BlockFactory blockFactory, int channel) { + super(blockFactory, channel); + this.cells = blockFactory.bigArrays().newObjectArray(1); $if(BytesRef)$ BytesRefArray bytes = null; try { - bytes = new BytesRefArray(totalPositions * 3L, blockFactory.bigArrays()); + bytes = new BytesRefArray(1L, blockFactory.bigArrays()); this.bytes = bytes; } finally { if (bytes == null) { @@ -67,6 +73,7 @@ $endif$ continue; } int cellPosition = positions.getInt(i); + cells = blockFactory.bigArrays().grow(cells, cellPosition + 1); final var oldCell = cells.get(cellPosition); final var newCell = extendCell(oldCell, valueCount); cells.set(cellPosition, newCell); @@ -96,37 +103,90 @@ $endif$ } } - @Override - Block build() { - try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(totalPositions)) { + private $if(BytesRef)$int$else$$type$$endif$[] combineCell($if(BytesRef)$int$else$$type$$endif$[] first, $if(BytesRef)$int$else$$type$$endif$[] second) { + if (first == null) { + return second; + } + if (second == null) { + return first; + } + var result = new $if(BytesRef)$int$else$$type$$endif$[first.length + second.length]; + System.arraycopy(first, 0, result, 0, first.length); + System.arraycopy(second, 0, result, first.length, second.length); + return result; + } + + private void appendGroupToBlockBuilder($Type$Block.Builder builder, 
$if(BytesRef)$int$else$$type$$endif$[] group) { + if (group == null) { + builder.appendNull(); + } else if (group.length == 1) { $if(BytesRef)$ - BytesRef scratch = new BytesRef(); + builder.appendBytesRef(bytes.get(group[0], scratch)); +$else$ + builder.append$Type$(group[0]); $endif$ - for (int i = 0; i < totalPositions; i++) { - final var cell = cells.get(i); - if (cell == null) { - builder.appendNull(); - continue; - } - if (cell.length > 1) { - builder.beginPositionEntry(); - } - // TODO: sort and dedup - for (var v : cell) { + } else { + builder.beginPositionEntry(); + // TODO: sort and dedup and set MvOrdering + for (var v : group) { $if(BytesRef)$ - builder.appendBytesRef(bytes.get(v, scratch)); + builder.appendBytesRef(bytes.get(v, scratch)); $else$ - builder.append$Type$(v); + builder.append$Type$(v); $endif$ - } - if (cell.length > 1) { - builder.endPositionEntry(); + } + builder.endPositionEntry(); + } + } + + private $if(BytesRef)$int$else$$type$$endif$[] getCellOrNull(int position) { + return position < cells.size() ? cells.get(position) : null; + } + + private Block buildWithSelected(IntBlock selected) { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + int selectedCount = selected.getValueCount(i); + switch (selectedCount) { + case 0 -> builder.appendNull(); + case 1 -> { + int groupId = selected.getInt(selected.getFirstValueIndex(i)); + appendGroupToBlockBuilder(builder, getCellOrNull(groupId)); + } + default -> { + int firstValueIndex = selected.getFirstValueIndex(i); + var cell = getCellOrNull(selected.getInt(firstValueIndex)); + for (int p = 1; p < selectedCount; p++) { + int groupId = selected.getInt(firstValueIndex + p); + cell = combineCell(cell, getCellOrNull(groupId)); + } + appendGroupToBlockBuilder(builder, cell); + } } } return builder.build(); } } + private Block buildWithSelected(IntVector selected) { + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(selected.getPositionCount())) { + for (int i = 0; i < selected.getPositionCount(); i++) { + appendGroupToBlockBuilder(builder, getCellOrNull(selected.getInt(i))); + } + return builder.build(); + } + } + + @Override + Block build(IntBlock selected) { + var vector = selected.asVector(); + if (vector != null) { + return buildWithSelected(vector); + } else { + return buildWithSelected(selected); + } + } + @Override public void close() { Releasables.close($if(BytesRef)$bytes, $endif$cells, super::close); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java index f6e8b9107504c..24ca02a9d2e07 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderTests.java @@ -15,6 +15,8 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.test.ESTestCase; @@ -30,10 +32,10 @@ public class EnrichResultBuilderTests extends ESTestCase { public void testBytesRef() { BlockFactory 
blockFactory = blockFactory(); - Map> expectedValues = new HashMap<>(); + Map> inputValues = new HashMap<>(); int numPages = between(0, 10); int maxPosition = between(0, 100); - var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0, maxPosition + 1); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.BYTES_REF, blockFactory, 0); for (int i = 0; i < numPages; i++) { int numRows = between(1, 100); try ( @@ -52,7 +54,7 @@ public void testBytesRef() { } for (int v = 0; v < numValues; v++) { BytesRef val = new BytesRef(randomByteArrayOfLength(10)); - expectedValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); + inputValues.computeIfAbsent(position, k -> new ArrayList<>()).add(val); valuesBuilder.appendBytesRef(val); } if (numValues > 1) { @@ -64,18 +66,60 @@ public void testBytesRef() { } } } - try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build()) { - assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); - for (int i = 0; i < actualOutput.getPositionCount(); i++) { - List values = expectedValues.get(i); - if (actualOutput.isNull(i)) { - assertNull(values); + try (IntVector selected = IntVector.range(0, maxPosition + 1, blockFactory)) { + try (BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build(selected.asBlock())) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = inputValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v))); + } + } + } + } + } + try (IntBlock.Builder selectedBuilder = blockFactory.newIntBlockBuilder(between(1, 10))) { + int selectedPositions = between(1, 100); + Map> expectedValues = new HashMap<>(); + for (int i = 0; i < selectedPositions; i++) { + int ps = randomIntBetween(0, 3); + List values = new ArrayList<>(); + if (ps == 0) { + selectedBuilder.appendNull(); } else { - int valueCount = actualOutput.getValueCount(i); - int first = actualOutput.getFirstValueIndex(i); - assertThat(valueCount, equalTo(values.size())); - for (int v = 0; v < valueCount; v++) { - assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), equalTo(values.get(v))); + selectedBuilder.beginPositionEntry(); + for (int p = 0; p < ps; p++) { + int position = randomIntBetween(0, maxPosition); + selectedBuilder.appendInt(position); + values.addAll(inputValues.getOrDefault(position, List.of())); + } + selectedBuilder.endPositionEntry(); + } + if (values.isEmpty()) { + expectedValues.put(i, null); + } else { + expectedValues.put(i, values); + } + } + try (var selected = selectedBuilder.build(); BytesRefBlock actualOutput = (BytesRefBlock) resultBuilder.build(selected)) { + assertThat(actualOutput.getPositionCount(), equalTo(selected.getPositionCount())); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getBytesRef(first + v, new BytesRef()), 
equalTo(values.get(v))); + } } } } @@ -89,7 +133,7 @@ public void testLong() { Map> expectedValues = new HashMap<>(); int numPages = between(0, 10); int maxPosition = between(0, 100); - var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0, maxPosition + 1); + var resultBuilder = EnrichResultBuilder.enrichResultBuilder(ElementType.LONG, blockFactory, 0); for (int i = 0; i < numPages; i++) { int numRows = between(1, 100); try ( @@ -120,18 +164,20 @@ public void testLong() { } } } - try (LongBlock actualOutput = (LongBlock) resultBuilder.build()) { - assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); - for (int i = 0; i < actualOutput.getPositionCount(); i++) { - List values = expectedValues.get(i); - if (actualOutput.isNull(i)) { - assertNull(values); - } else { - int valueCount = actualOutput.getValueCount(i); - int first = actualOutput.getFirstValueIndex(i); - assertThat(valueCount, equalTo(values.size())); - for (int v = 0; v < valueCount; v++) { - assertThat(actualOutput.getLong(first + v), equalTo(values.get(v))); + try (IntVector selected = IntVector.range(0, maxPosition + 1, blockFactory)) { + try (LongBlock actualOutput = (LongBlock) resultBuilder.build(selected.asBlock())) { + assertThat(actualOutput.getPositionCount(), equalTo(maxPosition + 1)); + for (int i = 0; i < actualOutput.getPositionCount(); i++) { + List values = expectedValues.get(i); + if (actualOutput.isNull(i)) { + assertNull(values); + } else { + int valueCount = actualOutput.getValueCount(i); + int first = actualOutput.getFirstValueIndex(i); + assertThat(valueCount, equalTo(values.size())); + for (int v = 0; v < valueCount; v++) { + assertThat(actualOutput.getLong(first + v), equalTo(values.get(v))); + } } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java index 09bc36a5390af..df49fff5191bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/MergePositionsOperatorTests.java @@ -18,7 +18,9 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import java.util.List; @@ -31,11 +33,12 @@ public void testSimple() throws Exception { BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = new BlockFactory(breaker, bigArrays); + IntVector selected = IntVector.range(0, 7, blockFactory); MergePositionsOperator mergeOperator = new MergePositionsOperator( - 7, 0, new int[] { 1, 2 }, new ElementType[] { ElementType.BYTES_REF, ElementType.INT }, + selected.asBlock(), blockFactory ); { @@ -123,8 +126,7 @@ public void testSimple() throws Exception { assertTrue(f2.isNull(4)); assertThat(BlockUtils.toJavaObject(f2, 5), equalTo(2023)); assertTrue(f2.isNull(6)); - mergeOperator.close(); - out.releaseBlocks(); + Releasables.close(mergeOperator, selected, out::releaseBlocks); 
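// Releasables.close(...) takes varargs and releases every argument even if an earlier one
// throws, which is why the manual mergeOperator.close()/out.releaseBlocks() pair above
// collapses into a single call; the out::releaseBlocks method reference works because
// Releasable has a single close() method.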
        MockBigArrays.ensureAllArraysAreReleased();
    }
}

From f8fe610966e8f9aae222b961ff05c9d1b418de78 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Tue, 16 Apr 2024 17:15:59 +0100
Subject: [PATCH 056/130] [ML] Add GET _inference for all inference endpoints
 (#107517)

---
 docs/changelog/107517.yaml                             |  5 +++++
 .../reference/inference/get-inference.asciidoc         |  2 ++
 .../rest-api-spec/api/inference.get_model.json         |  6 ++++++
 .../rest/RestGetInferenceModelAction.java              |  7 ++++++-
 .../test/inference/inference_crud.yml                  | 18 ++++++++++++++++++
 5 files changed, 37 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/107517.yaml

diff --git a/docs/changelog/107517.yaml b/docs/changelog/107517.yaml
new file mode 100644
index 0000000000000..4d7830699ad49
--- /dev/null
+++ b/docs/changelog/107517.yaml
@@ -0,0 +1,5 @@
+pr: 107517
+summary: Add GET `_inference` for all inference endpoints
+area: Machine Learning
+type: enhancement
+issues: []
diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc
index 2cfc17a3b6203..74a430139d89a 100644
--- a/docs/reference/inference/get-inference.asciidoc
+++ b/docs/reference/inference/get-inference.asciidoc
@@ -18,6 +18,8 @@ non-NLP models, use the <>.
 [[get-inference-api-request]]
 ==== {api-request-title}

+`GET /_inference`
+
 `GET /_inference/_all`

 `GET /_inference/<inference_id>`
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json
index f9340810e2e43..3749c2ec9577e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json
@@ -11,6 +11,12 @@
     },
     "url":{
       "paths":[
+        {
+          "path":"/_inference",
+          "methods":[
+            "GET"
+          ]
+        },
         {
           "path":"/_inference/{inference_id}",
           "methods":[
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java
index 4de6ff7af1f15..34d0f2647b2db 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java
@@ -34,7 +34,12 @@ public String getName() {

     @Override
     public List<Route> routes() {
-        return List.of(new Route(GET, "_inference/_all"), new Route(GET, INFERENCE_ID_PATH), new Route(GET, TASK_TYPE_INFERENCE_ID_PATH));
+        return List.of(
+            new Route(GET, "_inference"),
+            new Route(GET, "_inference/_all"),
+            new Route(GET, INFERENCE_ID_PATH),
+            new Route(GET, TASK_TYPE_INFERENCE_ID_PATH)
+        );
     }

     @Override
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
index 39a107373c8a3..ec8ca43a44b24 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml
@@ -38,3 +38,21 @@
         "input": "important text"
       }
   - match: { error.reason: "Unknown task_type [bad]" }
+
+---
+"Test get all":
+  - do:
+      inference.get_model:
+        inference_id: "*"
+  - length: { models: 0}
+
+  - do:
+      inference.get_model:
+        inference_id: _all
+  - length: { models: 0}
+
+  - do:
+      inference.get_model:
+        inference_id: ""
+  - 
length: { models: 0} + From c2df24b2bafb106e745d35073608e547776d78ab Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 16 Apr 2024 12:26:01 -0400 Subject: [PATCH 057/130] [ML] Refactor models to expose fields used for rate limiting (#107444) * Adding rate limit interface to openai and hugging face * Adding cohere rate limit service settings * Switching to request manager * Adjusting constructor scope * Removing uri from cohere * Adding rate limit fields for azure * Fixing spotless * Only storing hash code values --- .../AzureOpenAiEmbeddingsAction.java | 6 +-- .../action/cohere/CohereActionCreator.java | 4 +- .../action/cohere/CohereEmbeddingsAction.java | 9 +++-- .../action/cohere/CohereRerankAction.java | 9 +++-- .../action/huggingface/HuggingFaceAction.java | 11 +++-- .../openai/OpenAiChatCompletionAction.java | 6 +-- .../action/openai/OpenAiEmbeddingsAction.java | 6 +-- .../external/cohere/CohereAccount.java | 15 ++++++- .../external/http/RequestExecutor.java | 4 +- ... AzureOpenAiEmbeddingsRequestManager.java} | 21 ++++++---- .../sender/AzureOpenAiRequestManager.java | 30 ++++++++++++++ .../http/sender/BaseRequestManager.java | 40 +++++++++++++++++++ ...va => CohereEmbeddingsRequestManager.java} | 17 ++++---- .../http/sender/CohereRequestManager.java | 28 +++++++++++++ ...r.java => CohereRerankRequestManager.java} | 19 +++++---- .../http/sender/HttpRequestSender.java | 2 +- ...or.java => HuggingFaceRequestManager.java} | 39 +++++++++++++----- .../http/sender/InferenceRequest.java | 2 +- .../external/http/sender/NoopTask.java | 2 +- ...va => OpenAiCompletionRequestManager.java} | 22 +++++----- ...va => OpenAiEmbeddingsRequestManager.java} | 25 +++++++----- .../http/sender/OpenAiRequestManager.java | 35 ++++++++++++++++ .../http/sender/RequestExecutorService.java | 2 +- ...equestCreator.java => RequestManager.java} | 5 ++- .../external/http/sender/RequestTask.java | 6 +-- .../external/http/sender/Sender.java | 2 +- .../huggingface/HuggingFaceAccount.java | 9 ++++- .../external/openai/OpenAiAccount.java | 14 ++++++- .../external/ratelimit/RateLimitable.java | 25 ++++++++++++ .../AzureOpenAiEmbeddingsRequest.java | 11 ++--- .../cohere/CohereEmbeddingsRequest.java | 14 +++---- .../request/cohere/CohereRerankRequest.java | 14 +++---- .../HuggingFaceInferenceRequest.java | 15 +++---- .../openai/OpenAiChatCompletionRequest.java | 14 +++---- .../openai/OpenAiEmbeddingsRequest.java | 21 +++------- .../inference/services/ServiceUtils.java | 6 +++ .../azureopenai/AzureOpenAiModel.java | 18 ++++++++- .../AzureOpenAiRateLimitServiceSettings.java | 15 +++++++ .../AzureOpenAiEmbeddingsModel.java | 6 ++- .../AzureOpenAiEmbeddingsServiceSettings.java | 5 ++- .../services/cohere/CohereModel.java | 21 +++++++++- .../embeddings/CohereEmbeddingsModel.java | 12 +++++- .../cohere/rerank/CohereRerankModel.java | 12 +++++- .../huggingface/HuggingFaceModel.java | 30 +++++++++++--- .../HuggingFaceRateLimitServiceSettings.java | 17 ++++++++ .../HuggingFaceServiceSettings.java | 3 +- .../elser/HuggingFaceElserModel.java | 19 +++------ .../elser/HuggingFaceElserSecretSettings.java | 3 +- .../HuggingFaceElserServiceSettings.java | 6 ++- .../HuggingFaceEmbeddingsModel.java | 19 +++------ .../services/openai/OpenAiModel.java | 32 ++++++++++++++- .../OpenAiRateLimitServiceSettings.java | 21 ++++++++++ .../completion/OpenAiChatCompletionModel.java | 7 +++- .../OpenAiChatCompletionServiceSettings.java | 6 ++- .../embeddings/OpenAiEmbeddingsModel.java | 7 +++- 
.../OpenAiEmbeddingsServiceSettings.java | 6 ++- .../services/settings/ApiKeySecrets.java | 14 +++++++ .../settings/DefaultSecretSettings.java | 2 +- .../cohere/CohereEmbeddingsActionTests.java | 2 +- .../sender/ExecutableRequestCreatorTests.java | 10 ++--- .../http/sender/HttpRequestSenderTests.java | 2 +- ...beddingsExecutableRequestCreatorTests.java | 15 ++++--- .../sender/RequestExecutorServiceTests.java | 4 +- .../http/sender/RequestTaskTests.java | 10 ++--- .../sender/SingleRequestManagerTests.java | 2 +- .../AzureOpenAiEmbeddingsRequestTests.java | 4 -- .../cohere/CohereEmbeddingsRequestTests.java | 4 +- .../HuggingFaceInferenceRequestTests.java | 4 -- .../OpenAiChatCompletionRequestTests.java | 9 +---- .../openai/OpenAiEmbeddingsRequestTests.java | 3 -- 70 files changed, 618 insertions(+), 242 deletions(-) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{AzureOpenAiEmbeddingsExecutableRequestCreator.java => AzureOpenAiEmbeddingsRequestManager.java} (77%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiRequestManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{CohereEmbeddingsExecutableRequestCreator.java => CohereEmbeddingsRequestManager.java} (79%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRequestManager.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{CohereRerankExecutableRequestCreator.java => CohereRerankRequestManager.java} (76%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{HuggingFaceExecutableRequestCreator.java => HuggingFaceRequestManager.java} (62%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{OpenAiCompletionExecutableRequestCreator.java => OpenAiCompletionRequestManager.java} (78%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{OpenAiEmbeddingsExecutableRequestCreator.java => OpenAiEmbeddingsRequestManager.java} (77%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiRequestManager.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{ExecutableRequestCreator.java => RequestManager.java} (86%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ratelimit/RateLimitable.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiRateLimitServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceRateLimitServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiRateLimitServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/ApiKeySecrets.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsAction.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsAction.java index a682ad2bb23d5..1b2226dd3f9f7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiEmbeddingsAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiEmbeddingsExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiEmbeddingsRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -27,14 +27,14 @@ public class AzureOpenAiEmbeddingsAction implements ExecutableAction { private final String errorMessage; - private final AzureOpenAiEmbeddingsExecutableRequestCreator requestCreator; + private final AzureOpenAiEmbeddingsRequestManager requestCreator; private final Sender sender; public AzureOpenAiEmbeddingsAction(Sender sender, AzureOpenAiEmbeddingsModel model, ServiceComponents serviceComponents) { Objects.requireNonNull(serviceComponents); Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); - requestCreator = new AzureOpenAiEmbeddingsExecutableRequestCreator(model, serviceComponents.truncator()); + requestCreator = new AzureOpenAiEmbeddingsRequestManager(model, serviceComponents.truncator(), serviceComponents.threadPool()); errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI embeddings"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java index b8e1b34c11f27..9f54950dba2d3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereActionCreator.java @@ -33,13 +33,13 @@ public CohereActionCreator(Sender sender, ServiceComponents serviceComponents) { public ExecutableAction create(CohereEmbeddingsModel model, Map taskSettings, InputType inputType) { var overriddenModel = CohereEmbeddingsModel.of(model, taskSettings, inputType); - return new CohereEmbeddingsAction(sender, overriddenModel); + return new CohereEmbeddingsAction(sender, overriddenModel, serviceComponents.threadPool()); } @Override public ExecutableAction create(CohereRerankModel model, Map taskSettings) { var overriddenModel = CohereRerankModel.of(model, taskSettings); - return new CohereRerankAction(sender, overriddenModel); + return new CohereRerankAction(sender, overriddenModel, serviceComponents.threadPool()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index f4fddf65ea218..63e51d99a8cee 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -11,8 +11,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.CohereEmbeddingsExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.CohereEmbeddingsRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; @@ -26,16 +27,16 @@ public class CohereEmbeddingsAction implements ExecutableAction { private final String failedToSendRequestErrorMessage; private final Sender sender; - private final CohereEmbeddingsExecutableRequestCreator requestCreator; + private final CohereEmbeddingsRequestManager requestCreator; - public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model) { + public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model, ThreadPool threadPool) { Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( model.getServiceSettings().getCommonSettings().uri(), "Cohere embeddings" ); - requestCreator = new CohereEmbeddingsExecutableRequestCreator(model); + requestCreator = CohereEmbeddingsRequestManager.of(model, threadPool); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java index 5209781b00583..0613b8ef76453 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereRerankAction.java @@ -11,8 +11,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.CohereRerankExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.CohereRerankRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; @@ -26,16 +27,16 @@ public class CohereRerankAction implements ExecutableAction { private final String failedToSendRequestErrorMessage; private final Sender sender; - private final CohereRerankExecutableRequestCreator requestCreator; + private final CohereRerankRequestManager requestCreator; - public CohereRerankAction(Sender sender, CohereRerankModel model) { + public CohereRerankAction(Sender sender, CohereRerankModel model, 
ThreadPool threadPool) { Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( model.getServiceSettings().getCommonSettings().uri(), "Cohere rerank" ); - requestCreator = new CohereRerankExecutableRequestCreator(model); + requestCreator = CohereRerankRequestManager.of(model, threadPool); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java index 6d6580c391cf8..1e5f01f801e17 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.http.sender.HuggingFaceExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.HuggingFaceRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; @@ -28,7 +28,7 @@ public class HuggingFaceAction implements ExecutableAction { private final String errorMessage; private final Sender sender; - private final HuggingFaceExecutableRequestCreator requestCreator; + private final HuggingFaceRequestManager requestCreator; public HuggingFaceAction( Sender sender, @@ -40,7 +40,12 @@ public HuggingFaceAction( Objects.requireNonNull(serviceComponents); Objects.requireNonNull(requestType); this.sender = Objects.requireNonNull(sender); - requestCreator = new HuggingFaceExecutableRequestCreator(model, responseHandler, serviceComponents.truncator()); + requestCreator = HuggingFaceRequestManager.of( + model, + responseHandler, + serviceComponents.truncator(), + serviceComponents.threadPool() + ); errorMessage = format( "Failed to send Hugging Face %s request from inference entity id [%s]", requestType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index c474f3bfb0ab7..5d75adedddde0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; -import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; 
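// The same substitution runs through all of these action classes: each *ExecutableRequestCreator
// becomes a *RequestManager, created via a static of(...) factory that also receives the
// ThreadPool. Every manager extends BaseRequestManager, which pairs inferenceEntityId() with a
// rateLimitGrouping() key so the request executor can bucket requests that must share one rate
// limit. A sketch of such a key, modeled on the Azure OpenAI grouping shown later in this patch
// (the OpenAI variant is not visible in these hunks, so the field choice here is an assumption):
//
//     record RateLimitGrouping(int apiKeyHash) {
//         static RateLimitGrouping of(OpenAiModel model) {
//             return new RateLimitGrouping(model.apiKey().hashCode()); // store only the hash
//         }
//     }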
import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; @@ -30,7 +30,7 @@ public class OpenAiChatCompletionAction implements ExecutableAction { private final String errorMessage; - private final OpenAiCompletionExecutableRequestCreator requestCreator; + private final OpenAiCompletionRequestManager requestCreator; private final Sender sender; @@ -38,7 +38,7 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model Objects.requireNonNull(serviceComponents); Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); - this.requestCreator = new OpenAiCompletionExecutableRequestCreator(model); + this.requestCreator = OpenAiCompletionRequestManager.of(model, serviceComponents.threadPool()); this.errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), "OpenAI chat completions"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java index 8a64b9f922aca..3e92d206b4257 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; -import org.elasticsearch.xpack.inference.external.http.sender.OpenAiEmbeddingsExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.OpenAiEmbeddingsRequestManager; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; @@ -27,14 +27,14 @@ public class OpenAiEmbeddingsAction implements ExecutableAction { private final String errorMessage; - private final OpenAiEmbeddingsExecutableRequestCreator requestCreator; + private final OpenAiEmbeddingsRequestManager requestCreator; private final Sender sender; public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, ServiceComponents serviceComponents) { Objects.requireNonNull(serviceComponents); Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); - requestCreator = new OpenAiEmbeddingsExecutableRequestCreator(model, serviceComponents.truncator()); + requestCreator = OpenAiEmbeddingsRequestManager.of(model, serviceComponents.truncator(), serviceComponents.threadPool()); errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), "OpenAI embeddings"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java index 9847d496d14ee..9fc5640f8cf79 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereAccount.java @@ -7,15 
+7,26 @@ package org.elasticsearch.xpack.inference.external.cohere; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.inference.services.cohere.CohereModel; import java.net.URI; +import java.net.URISyntaxException; import java.util.Objects; -public record CohereAccount(@Nullable URI url, SecureString apiKey) { +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; + +public record CohereAccount(URI uri, SecureString apiKey) { + + public static CohereAccount of(CohereModel model, CheckedSupplier uriBuilder) { + var uri = buildUri(model.uri(), "Cohere", uriBuilder); + + return new CohereAccount(uri, model.apiKey()); + } public CohereAccount { + Objects.requireNonNull(uri); Objects.requireNonNull(apiKey); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java index 7b0287e9652f7..63c042ce8a623 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java @@ -11,8 +11,8 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.http.sender.ExecutableRequestCreator; import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; import java.util.concurrent.TimeUnit; @@ -28,7 +28,7 @@ public interface RequestExecutor { boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException; void execute( - ExecutableRequestCreator requestCreator, + RequestManager requestCreator, InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java similarity index 77% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java index b3f53d5f3f236..06152b50822aa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiEmbeddingsRequestManager.java @@ -12,8 +12,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.Truncator; -import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiAccount; import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; import 
org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -27,9 +27,9 @@ import static org.elasticsearch.xpack.inference.common.Truncator.truncate; -public class AzureOpenAiEmbeddingsExecutableRequestCreator implements ExecutableRequestCreator { +public class AzureOpenAiEmbeddingsRequestManager extends AzureOpenAiRequestManager { - private static final Logger logger = LogManager.getLogger(AzureOpenAiEmbeddingsExecutableRequestCreator.class); + private static final Logger logger = LogManager.getLogger(AzureOpenAiEmbeddingsRequestManager.class); private static final ResponseHandler HANDLER = createEmbeddingsHandler(); @@ -37,13 +37,20 @@ private static ResponseHandler createEmbeddingsHandler() { return new AzureOpenAiResponseHandler("azure openai text embedding", OpenAiEmbeddingsResponseEntity::fromResponse); } + public static AzureOpenAiEmbeddingsRequestManager of(AzureOpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) { + return new AzureOpenAiEmbeddingsRequestManager( + Objects.requireNonNull(model), + Objects.requireNonNull(truncator), + Objects.requireNonNull(threadPool) + ); + } + private final Truncator truncator; private final AzureOpenAiEmbeddingsModel model; - private final AzureOpenAiAccount account; - public AzureOpenAiEmbeddingsExecutableRequestCreator(AzureOpenAiEmbeddingsModel model, Truncator truncator) { + public AzureOpenAiEmbeddingsRequestManager(AzureOpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) { + super(threadPool, model); this.model = Objects.requireNonNull(model); - this.account = AzureOpenAiAccount.fromModel(model); this.truncator = Objects.requireNonNull(truncator); } @@ -57,7 +64,7 @@ public Runnable create( ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); - AzureOpenAiEmbeddingsRequest request = new AzureOpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); + AzureOpenAiEmbeddingsRequest request = new AzureOpenAiEmbeddingsRequest(truncator, truncatedInput, model); return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiRequestManager.java new file mode 100644 index 0000000000000..312ac8de262ab --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiRequestManager.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; + +import java.util.Objects; + +public abstract class AzureOpenAiRequestManager extends BaseRequestManager { + protected AzureOpenAiRequestManager(ThreadPool threadPool, AzureOpenAiModel model) { + super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model)); + } + + record RateLimitGrouping(int resourceNameHash, int deploymentIdHash) { + public static RateLimitGrouping of(AzureOpenAiModel model) { + Objects.requireNonNull(model); + + return new RateLimitGrouping( + model.rateLimitServiceSettings().resourceName().hashCode(), + model.rateLimitServiceSettings().deploymentId().hashCode() + ); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java new file mode 100644 index 0000000000000..b26489765e07a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/BaseRequestManager.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +abstract class BaseRequestManager implements RequestManager { + private final ThreadPool threadPool; + private final String inferenceEntityId; + private final Object rateLimitGroup; + + BaseRequestManager(ThreadPool threadPool, String inferenceEntityId, Object rateLimitGroup) { + this.threadPool = Objects.requireNonNull(threadPool); + this.inferenceEntityId = Objects.requireNonNull(inferenceEntityId); + this.rateLimitGroup = Objects.requireNonNull(rateLimitGroup); + } + + protected void execute(Runnable runnable) { + threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(runnable); + } + + @Override + public String inferenceEntityId() { + return inferenceEntityId; + } + + @Override + public Object rateLimitGrouping() { + return rateLimitGroup; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java similarity index 79% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java index 6488996d2edc9..0bf1c11285adb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.cohere.CohereResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -24,20 +24,23 @@ import java.util.Objects; import java.util.function.Supplier; -public class CohereEmbeddingsExecutableRequestCreator implements ExecutableRequestCreator { - private static final Logger logger = LogManager.getLogger(CohereEmbeddingsExecutableRequestCreator.class); +public class CohereEmbeddingsRequestManager extends CohereRequestManager { + private static final Logger logger = LogManager.getLogger(CohereEmbeddingsRequestManager.class); private static final ResponseHandler HANDLER = createEmbeddingsHandler(); private static ResponseHandler createEmbeddingsHandler() { return new CohereResponseHandler("cohere text embedding", CohereEmbeddingsResponseEntity::fromResponse); } - private final CohereAccount account; + public static CohereEmbeddingsRequestManager of(CohereEmbeddingsModel model, ThreadPool threadPool) { + return new CohereEmbeddingsRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + private final CohereEmbeddingsModel model; - public CohereEmbeddingsExecutableRequestCreator(CohereEmbeddingsModel model) { + private CohereEmbeddingsRequestManager(CohereEmbeddingsModel model, ThreadPool threadPool) { + super(threadPool, model); this.model = Objects.requireNonNull(model); - account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); } @Override @@ -49,7 +52,7 @@ public Runnable create( HttpClientContext context, ActionListener listener ) { - CohereEmbeddingsRequest request = new CohereEmbeddingsRequest(account, input, model); + CohereEmbeddingsRequest request = new CohereEmbeddingsRequest(input, model); return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRequestManager.java new file mode 100644 index 0000000000000..7ce71439f9882 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRequestManager.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.services.cohere.CohereModel; + +import java.util.Objects; + +abstract class CohereRequestManager extends BaseRequestManager { + + protected CohereRequestManager(ThreadPool threadPool, CohereModel model) { + super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model)); + } + + record RateLimitGrouping(int apiKeyHash) { + public static RateLimitGrouping of(CohereModel model) { + Objects.requireNonNull(model); + + return new RateLimitGrouping(model.apiKey().hashCode()); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java similarity index 76% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java index 432a5334ac001..1778663a194e8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.cohere.CohereResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -24,20 +24,23 @@ import java.util.Objects; import java.util.function.Supplier; -public class CohereRerankExecutableRequestCreator implements ExecutableRequestCreator { - private static final Logger logger = LogManager.getLogger(CohereRerankExecutableRequestCreator.class); +public class CohereRerankRequestManager extends CohereRequestManager { + private static final Logger logger = LogManager.getLogger(CohereRerankRequestManager.class); private static final ResponseHandler HANDLER = createCohereResponseHandler(); private static ResponseHandler createCohereResponseHandler() { return new CohereResponseHandler("cohere rerank", (request, response) -> CohereRankedResponseEntity.fromResponse(response)); } - private final CohereAccount account; + public static CohereRerankRequestManager of(CohereRerankModel model, ThreadPool threadPool) { + return new CohereRerankRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + private final CohereRerankModel model; - public CohereRerankExecutableRequestCreator(CohereRerankModel model) { - this.model = Objects.requireNonNull(model); - account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); + private CohereRerankRequestManager(CohereRerankModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = model; } @Override @@ -49,7 +52,7 @@ public Runnable create( HttpClientContext context, 
ActionListener listener ) { - CohereRerankRequest request = new CohereRerankRequest(account, query, input, model); + CohereRerankRequest request = new CohereRerankRequest(query, input, model); return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java index a98b172ccbd4d..d337860848160 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSender.java @@ -124,7 +124,7 @@ public void close() throws IOException { */ @Override public void send( - ExecutableRequestCreator requestCreator, + RequestManager requestCreator, InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener listener diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java similarity index 62% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java index 7c70f738105d1..a06a6da2bbb15 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HuggingFaceRequestManager.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; @@ -25,19 +26,32 @@ import static org.elasticsearch.xpack.inference.common.Truncator.truncate; -public class HuggingFaceExecutableRequestCreator implements ExecutableRequestCreator { - private static final Logger logger = LogManager.getLogger(HuggingFaceExecutableRequestCreator.class); +public class HuggingFaceRequestManager extends BaseRequestManager { + private static final Logger logger = LogManager.getLogger(HuggingFaceRequestManager.class); + + public static HuggingFaceRequestManager of( + HuggingFaceModel model, + ResponseHandler responseHandler, + Truncator truncator, + ThreadPool threadPool + ) { + return new HuggingFaceRequestManager( + Objects.requireNonNull(model), + Objects.requireNonNull(responseHandler), + Objects.requireNonNull(truncator), + Objects.requireNonNull(threadPool) + ); + } private final HuggingFaceModel model; - private final HuggingFaceAccount account; private final ResponseHandler responseHandler; private final Truncator truncator; - public HuggingFaceExecutableRequestCreator(HuggingFaceModel model, ResponseHandler responseHandler, Truncator truncator) { - this.model = 
Objects.requireNonNull(model); - account = new HuggingFaceAccount(model.getUri(), model.getApiKey()); - this.responseHandler = Objects.requireNonNull(responseHandler); - this.truncator = Objects.requireNonNull(truncator); + private HuggingFaceRequestManager(HuggingFaceModel model, ResponseHandler responseHandler, Truncator truncator, ThreadPool threadPool) { + super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model)); + this.model = model; + this.responseHandler = responseHandler; + this.truncator = truncator; } @Override @@ -50,7 +64,7 @@ public Runnable create( ActionListener listener ) { var truncatedInput = truncate(input, model.getTokenLimit()); - var request = new HuggingFaceInferenceRequest(truncator, account, truncatedInput, model); + var request = new HuggingFaceInferenceRequest(truncator, truncatedInput, model); return new ExecutableInferenceRequest( requestSender, @@ -62,4 +76,11 @@ public Runnable create( listener ); } + + record RateLimitGrouping(int accountHash) { + + public static RateLimitGrouping of(HuggingFaceModel model) { + return new RateLimitGrouping(new HuggingFaceAccount(model.rateLimitServiceSettings().uri(), model.apiKey()).hashCode()); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java index 5d5e8df40c22d..3c711bb79717c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/InferenceRequest.java @@ -21,7 +21,7 @@ public interface InferenceRequest { /** * Returns the creator that handles building an executable request based on the input provided. */ - ExecutableRequestCreator getRequestCreator(); + RequestManager getRequestCreator(); /** * Returns the query associated with this request. Used for Rerank tasks. 
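The renamed RequestManager contract above extends RateLimitable, so every request creator now exposes a rateLimitGrouping() key alongside inferenceEntityId(). A minimal sketch of how a caller could bucket requests by that key; the SimpleRateLimitBuckets class and its counter are hypothetical illustrations under that assumption, not part of this patch:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical helper: counts requests per opaque key returned from
// RateLimitable#rateLimitGrouping(). Record-based keys such as
// RateLimitGrouping(int apiKeyHash) provide value-based equals/hashCode,
// so requests sharing an api key (or endpoint, model, ...) share a bucket.
final class SimpleRateLimitBuckets {
    private final Map<Object, AtomicLong> requestCounts = new ConcurrentHashMap<>();

    long record(Object rateLimitGrouping) {
        return requestCounts.computeIfAbsent(rateLimitGrouping, key -> new AtomicLong()).incrementAndGet();
    }
}

This is also why the grouping records below hash the api key, resource name, or deployment id rather than holding live references: the key only needs stable equality semantics, not access to the secret itself.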
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java index cca00b2e9bf58..0355880b3f714 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java @@ -16,7 +16,7 @@ class NoopTask implements RejectableTask { @Override - public ExecutableRequestCreator getRequestCreator() { + public RequestManager getRequestCreator() { return null; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java similarity index 78% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java index 853038e1a7ca4..9c6c216c61272 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.external.openai.OpenAiChatCompletionResponseHandler; import org.elasticsearch.xpack.inference.external.request.openai.OpenAiChatCompletionRequest; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiChatCompletionResponseEntity; @@ -25,23 +25,21 @@ import java.util.Objects; import java.util.function.Supplier; -public class OpenAiCompletionExecutableRequestCreator implements ExecutableRequestCreator { +public class OpenAiCompletionRequestManager extends OpenAiRequestManager { - private static final Logger logger = LogManager.getLogger(OpenAiCompletionExecutableRequestCreator.class); + private static final Logger logger = LogManager.getLogger(OpenAiCompletionRequestManager.class); private static final ResponseHandler HANDLER = createCompletionHandler(); - private final OpenAiChatCompletionModel model; + public static OpenAiCompletionRequestManager of(OpenAiChatCompletionModel model, ThreadPool threadPool) { + return new OpenAiCompletionRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } - private final OpenAiAccount account; + private final OpenAiChatCompletionModel model; - public OpenAiCompletionExecutableRequestCreator(OpenAiChatCompletionModel model) { + private OpenAiCompletionRequestManager(OpenAiChatCompletionModel model, ThreadPool threadPool) { + super(threadPool, model, OpenAiChatCompletionRequest::buildDefaultUri); this.model = Objects.requireNonNull(model); - this.account = new 
OpenAiAccount( - this.model.getServiceSettings().uri(), - this.model.getServiceSettings().organizationId(), - this.model.getSecretSettings().apiKey() - ); } @Override @@ -53,7 +51,7 @@ public Runnable create( HttpClientContext context, ActionListener listener ) { - OpenAiChatCompletionRequest request = new OpenAiChatCompletionRequest(account, input, model); + OpenAiChatCompletionRequest request = new OpenAiChatCompletionRequest(input, model); return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java similarity index 77% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java index 8f867c374e2d3..3a0a8fd64a656 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsRequestManager.java @@ -12,10 +12,10 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.external.openai.OpenAiResponseHandler; import org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiEmbeddingsResponseEntity; @@ -27,9 +27,9 @@ import static org.elasticsearch.xpack.inference.common.Truncator.truncate; -public class OpenAiEmbeddingsExecutableRequestCreator implements ExecutableRequestCreator { +public class OpenAiEmbeddingsRequestManager extends OpenAiRequestManager { - private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsExecutableRequestCreator.class); + private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsRequestManager.class); private static final ResponseHandler HANDLER = createEmbeddingsHandler(); @@ -37,17 +37,20 @@ private static ResponseHandler createEmbeddingsHandler() { return new OpenAiResponseHandler("openai text embedding", OpenAiEmbeddingsResponseEntity::fromResponse); } + public static OpenAiEmbeddingsRequestManager of(OpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) { + return new OpenAiEmbeddingsRequestManager( + Objects.requireNonNull(model), + Objects.requireNonNull(truncator), + Objects.requireNonNull(threadPool) + ); + } + private final Truncator truncator; private final OpenAiEmbeddingsModel model; - private final OpenAiAccount account; - public OpenAiEmbeddingsExecutableRequestCreator(OpenAiEmbeddingsModel model, Truncator truncator) { + private 
OpenAiEmbeddingsRequestManager(OpenAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) { + super(threadPool, model, OpenAiEmbeddingsRequest::buildDefaultUri); this.model = Objects.requireNonNull(model); - this.account = new OpenAiAccount( - this.model.getServiceSettings().uri(), - this.model.getServiceSettings().organizationId(), - this.model.getSecretSettings().apiKey() - ); this.truncator = Objects.requireNonNull(truncator); } @@ -61,7 +64,7 @@ public Runnable create( ActionListener listener ) { var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); - OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); + OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, truncatedInput, model); return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiRequestManager.java new file mode 100644 index 0000000000000..74c1199714807 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiRequestManager.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.services.openai.OpenAiModel; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Objects; + +abstract class OpenAiRequestManager extends BaseRequestManager { + + protected OpenAiRequestManager(ThreadPool threadPool, OpenAiModel model, CheckedSupplier uriBuilder) { + super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model, uriBuilder)); + } + + record RateLimitGrouping(int accountHash, int modelIdHash) { + public static RateLimitGrouping of(OpenAiModel model, CheckedSupplier uriBuilder) { + Objects.requireNonNull(model); + + return new RateLimitGrouping( + OpenAiAccount.of(model, uriBuilder).hashCode(), + model.rateLimitServiceSettings().modelId().hashCode() + ); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index 0a5ab8f87ef1b..d5a13c2e0771d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -265,7 +265,7 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE * @param listener an {@link ActionListener} for the response or failure */ public void execute( - ExecutableRequestCreator requestCreator, + RequestManager requestCreator, InferenceInputs inferenceInputs, @Nullable TimeValue 
timeout, ActionListener<InferenceServiceResults> listener diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java similarity index 86% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java index dc279573d5c92..7d3cca596f1d0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestManager.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.ratelimit.RateLimitable; import java.util.List; import java.util.function.Supplier; @@ -19,7 +20,7 @@ /** * A contract for constructing a {@link Runnable} to handle sending an inference request to a 3rd party service. */ -public interface ExecutableRequestCreator { +public interface RequestManager extends RateLimitable { Runnable create( @Nullable String query, List<String> input, @@ -28,4 +29,6 @@ Runnable create( HttpClientContext context, ActionListener<InferenceServiceResults> listener ); + + String inferenceEntityId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java index 6628b9ef425e2..738592464232c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java @@ -26,13 +26,13 @@ class RequestTask implements RejectableTask { private final AtomicBoolean finished = new AtomicBoolean(); - private final ExecutableRequestCreator requestCreator; + private final RequestManager requestCreator; private final String query; private final List<String> input; private final ActionListener<InferenceServiceResults> listener; RequestTask( - ExecutableRequestCreator requestCreator, + RequestManager requestCreator, InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ThreadPool threadPool, @@ -111,7 +111,7 @@ public void onRejection(Exception e) { } @Override - public ExecutableRequestCreator getRequestCreator() { + public RequestManager getRequestCreator() { return requestCreator; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java index 40fd7836667d7..5a3af3d4a377f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/Sender.java @@ -18,7 +18,7 @@ public interface Sender extends Closeable { void start(); void send( - ExecutableRequestCreator requestCreator, + RequestManager requestCreator, InferenceInputs inferenceInputs, @Nullable TimeValue timeout, ActionListener<InferenceServiceResults> listener diff --git
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java index 771c7b6adaead..a31a968a25a9e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceAccount.java @@ -8,14 +8,19 @@ package org.elasticsearch.xpack.inference.external.huggingface; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; import java.net.URI; import java.util.Objects; -public record HuggingFaceAccount(URI url, SecureString apiKey) { +public record HuggingFaceAccount(URI uri, SecureString apiKey) { + + public static HuggingFaceAccount of(HuggingFaceModel model) { + return new HuggingFaceAccount(model.rateLimitServiceSettings().uri(), model.apiKey()); + } public HuggingFaceAccount { - Objects.requireNonNull(url); + Objects.requireNonNull(uri); Objects.requireNonNull(apiKey); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiAccount.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiAccount.java index a89032277ff8d..07ccf298a0bd3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiAccount.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiAccount.java @@ -7,15 +7,27 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.inference.services.openai.OpenAiModel; import java.net.URI; +import java.net.URISyntaxException; import java.util.Objects; -public record OpenAiAccount(@Nullable URI url, @Nullable String organizationId, SecureString apiKey) { +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; + +public record OpenAiAccount(URI uri, @Nullable String organizationId, SecureString apiKey) { + + public static OpenAiAccount of(OpenAiModel model, CheckedSupplier uriBuilder) { + var uri = buildUri(model.rateLimitServiceSettings().uri(), "OpenAI", uriBuilder); + + return new OpenAiAccount(uri, model.rateLimitServiceSettings().organizationId(), model.apiKey()); + } public OpenAiAccount { + Objects.requireNonNull(uri); Objects.requireNonNull(apiKey); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ratelimit/RateLimitable.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ratelimit/RateLimitable.java new file mode 100644 index 0000000000000..e6f0d5371d030 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ratelimit/RateLimitable.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.ratelimit; + +/** + * Defines the contract for the settings and grouping of requests for how they are rate limited. + */ +public interface RateLimitable { + + // TODO add a method for retrieving the rate limit settings that would be used to instantiate a RateLimiter + + /** + * Returns an object containing all the fields that uniquely identify how a request will be rate limited. + * In practice the class should contain things like api key, url, model, or any headers that would impact rate limiting. + * The class must implement hashCode such that these fields are taken into account. + * + * The returned object defines the bucket that a request is placed in when determining how it is rate limited. + */ + Object rateLimitGrouping(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index c943d5f54b4ff..f60d0130a01b6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -39,14 +39,9 @@ public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { private final URI uri; private final AzureOpenAiEmbeddingsModel model; - public AzureOpenAiEmbeddingsRequest( - Truncator truncator, - AzureOpenAiAccount account, - Truncator.TruncationResult input, - AzureOpenAiEmbeddingsModel model - ) { + public AzureOpenAiEmbeddingsRequest(Truncator truncator, Truncator.TruncationResult input, AzureOpenAiEmbeddingsModel model) { this.truncator = Objects.requireNonNull(truncator); - this.account = Objects.requireNonNull(account); + this.account = AzureOpenAiAccount.fromModel(model); this.truncationResult = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); this.uri = model.getUri(); @@ -100,7 +95,7 @@ public String getInferenceEntityId() { public Request truncate() { var truncatedInput = truncator.truncate(truncationResult.input()); - return new AzureOpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); + return new AzureOpenAiEmbeddingsRequest(truncator, truncatedInput, model); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java index 45f25a4dd35f5..5f3278788b69b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -26,25 +26,22 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; public class CohereEmbeddingsRequest implements Request { private final CohereAccount account; private final List<String> input; - private final URI uri; private final CohereEmbeddingsTaskSettings taskSettings; private final
String model; private final CohereEmbeddingType embeddingType; private final String inferenceEntityId; - public CohereEmbeddingsRequest(CohereAccount account, List input, CohereEmbeddingsModel embeddingsModel) { + public CohereEmbeddingsRequest(List input, CohereEmbeddingsModel embeddingsModel) { Objects.requireNonNull(embeddingsModel); - this.account = Objects.requireNonNull(account); + account = CohereAccount.of(embeddingsModel, CohereEmbeddingsRequest::buildDefaultUri); this.input = Objects.requireNonNull(input); - uri = buildUri(this.account.url(), "Cohere", CohereEmbeddingsRequest::buildDefaultUri); taskSettings = embeddingsModel.getTaskSettings(); model = embeddingsModel.getServiceSettings().getCommonSettings().modelId(); embeddingType = embeddingsModel.getServiceSettings().getEmbeddingType(); @@ -53,7 +50,7 @@ public CohereEmbeddingsRequest(CohereAccount account, List input, Cohere @Override public HttpRequest createHttpRequest() { - HttpPost httpPost = new HttpPost(uri); + HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString(new CohereEmbeddingsRequestEntity(input, taskSettings, model, embeddingType)).getBytes(StandardCharsets.UTF_8) @@ -74,7 +71,7 @@ public String getInferenceEntityId() { @Override public URI getURI() { - return uri; + return account.uri(); } @Override @@ -87,8 +84,7 @@ public boolean[] getTruncationInfo() { return null; } - // default for testing - static URI buildDefaultUri() throws URISyntaxException { + public static URI buildDefaultUri() throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(CohereUtils.HOST) .setPathSegments(CohereUtils.VERSION_1, CohereUtils.EMBEDDINGS_PATH) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java index b8f3916582bf2..f87bdb9ab7d4b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereRerankRequest.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; public class CohereRerankRequest implements Request { @@ -33,18 +32,16 @@ public class CohereRerankRequest implements Request { private final CohereAccount account; private final String query; private final List input; - private final URI uri; private final CohereRerankTaskSettings taskSettings; private final String model; private final String inferenceEntityId; - public CohereRerankRequest(CohereAccount account, String query, List input, CohereRerankModel model) { + public CohereRerankRequest(String query, List input, CohereRerankModel model) { Objects.requireNonNull(model); - this.account = Objects.requireNonNull(account); + this.account = CohereAccount.of(model, CohereRerankRequest::buildDefaultUri); this.input = Objects.requireNonNull(input); this.query = Objects.requireNonNull(query); - uri = buildUri(this.account.url(), "Cohere", CohereRerankRequest::buildDefaultUri); taskSettings = model.getTaskSettings(); this.model = model.getServiceSettings().getCommonSettings().modelId(); inferenceEntityId = 
model.getInferenceEntityId(); @@ -52,7 +49,7 @@ public CohereRerankRequest(CohereAccount account, String query, List inp @Override public HttpRequest createHttpRequest() { - HttpPost httpPost = new HttpPost(uri); + HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString(new CohereRerankRequestEntity(query, input, taskSettings, model)).getBytes(StandardCharsets.UTF_8) @@ -73,7 +70,7 @@ public String getInferenceEntityId() { @Override public URI getURI() { - return uri; + return account.uri(); } @Override @@ -86,8 +83,7 @@ public boolean[] getTruncationInfo() { return null; } - // default for testing - static URI buildDefaultUri() throws URISyntaxException { + public static URI buildDefaultUri() throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(CohereUtils.HOST) .setPathSegments(CohereUtils.VERSION_1, CohereUtils.RERANK_PATH) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java index cd4fef6f0e827..74427d7dbc211 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java @@ -31,20 +31,15 @@ public class HuggingFaceInferenceRequest implements Request { private final Truncator.TruncationResult truncationResult; private final HuggingFaceModel model; - public HuggingFaceInferenceRequest( - Truncator truncator, - HuggingFaceAccount account, - Truncator.TruncationResult input, - HuggingFaceModel model - ) { + public HuggingFaceInferenceRequest(Truncator truncator, Truncator.TruncationResult input, HuggingFaceModel model) { this.truncator = Objects.requireNonNull(truncator); - this.account = Objects.requireNonNull(account); + this.account = HuggingFaceAccount.of(model); this.truncationResult = Objects.requireNonNull(input); this.model = Objects.requireNonNull(model); } public HttpRequest createHttpRequest() { - HttpPost httpPost = new HttpPost(account.url()); + HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString(new HuggingFaceInferenceRequestEntity(truncationResult.input())).getBytes(StandardCharsets.UTF_8) @@ -57,7 +52,7 @@ public HttpRequest createHttpRequest() { } public URI getURI() { - return account.url(); + return account.uri(); } @Override @@ -69,7 +64,7 @@ public String getInferenceEntityId() { public Request truncate() { var truncateResult = truncator.truncate(truncationResult.input()); - return new HuggingFaceInferenceRequest(truncator, account, truncateResult, model); + return new HuggingFaceInferenceRequest(truncator, truncateResult, model); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java index e53d4e7362735..9fa6533161745 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; @@ -32,19 +31,17 @@ public class OpenAiChatCompletionRequest implements OpenAiRequest { private final OpenAiAccount account; private final List input; - private final URI uri; private final OpenAiChatCompletionModel model; - public OpenAiChatCompletionRequest(OpenAiAccount account, List input, OpenAiChatCompletionModel model) { - this.account = Objects.requireNonNull(account); + public OpenAiChatCompletionRequest(List input, OpenAiChatCompletionModel model) { + this.account = OpenAiAccount.of(model, OpenAiChatCompletionRequest::buildDefaultUri); this.input = Objects.requireNonNull(input); - this.uri = buildUri(this.account.url(), "OpenAI", OpenAiChatCompletionRequest::buildDefaultUri); this.model = Objects.requireNonNull(model); } @Override public HttpRequest createHttpRequest() { - HttpPost httpPost = new HttpPost(uri); + HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString( @@ -66,7 +63,7 @@ public HttpRequest createHttpRequest() { @Override public URI getURI() { - return uri; + return account.uri(); } @Override @@ -86,8 +83,7 @@ public String getInferenceEntityId() { return model.getInferenceEntityId(); } - // default for testing - static URI buildDefaultUri() throws URISyntaxException { + public static URI buildDefaultUri() throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(OpenAiUtils.HOST) .setPathSegments(OpenAiUtils.VERSION_1, OpenAiUtils.CHAT_PATH, OpenAiUtils.COMPLETIONS_PATH) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index df5d3024fd483..f82e7ff3f5260 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -24,7 +24,6 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; @@ -33,24 +32,17 @@ public class OpenAiEmbeddingsRequest implements OpenAiRequest { private final Truncator truncator; private final OpenAiAccount account; private final Truncator.TruncationResult truncationResult; - private final URI uri; private final OpenAiEmbeddingsModel model; - public OpenAiEmbeddingsRequest( - Truncator truncator, - OpenAiAccount account, - Truncator.TruncationResult input, - OpenAiEmbeddingsModel model - ) { + public OpenAiEmbeddingsRequest(Truncator truncator, Truncator.TruncationResult input, OpenAiEmbeddingsModel model) { this.truncator = Objects.requireNonNull(truncator); - 
this.account = Objects.requireNonNull(account); + this.account = OpenAiAccount.of(model, OpenAiEmbeddingsRequest::buildDefaultUri); this.truncationResult = Objects.requireNonNull(input); - this.uri = buildUri(this.account.url(), "OpenAI", OpenAiEmbeddingsRequest::buildDefaultUri); this.model = Objects.requireNonNull(model); } public HttpRequest createHttpRequest() { - HttpPost httpPost = new HttpPost(uri); + HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString( @@ -83,14 +75,14 @@ public String getInferenceEntityId() { @Override public URI getURI() { - return uri; + return account.uri(); } @Override public Request truncate() { var truncatedInput = truncator.truncate(truncationResult.input()); - return new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model); + return new OpenAiEmbeddingsRequest(truncator, truncatedInput, model); } @Override @@ -98,8 +90,7 @@ public boolean[] getTruncationInfo() { return truncationResult.truncated().clone(); } - // default for testing - static URI buildDefaultUri() throws URISyntaxException { + public static URI buildDefaultUri() throws URISyntaxException { return new URIBuilder().setScheme("https") .setHost(OpenAiUtils.HOST) .setPathSegments(OpenAiUtils.VERSION_1, OpenAiUtils.EMBEDDINGS_PATH) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 1631755149578..329e7664e5d4e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.TextEmbedding; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; import java.net.URI; import java.net.URISyntaxException; @@ -402,4 +403,9 @@ public static void getEmbeddingSize(Model model, InferenceService service, Actio } private static final String TEST_EMBEDDING_INPUT = "how big"; + + public static SecureString apiKey(@Nullable ApiKeySecrets secrets) { + // To avoid a possible null pointer throughout the code we'll create a noop api key of an empty array + return secrets == null ? 
new SecureString(new char[0]) : secrets.apiKey(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 66070cab0e517..5e50229e25643 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -17,23 +17,35 @@ import java.net.URI; import java.util.Map; +import java.util.Objects; public abstract class AzureOpenAiModel extends Model { protected URI uri; + private final AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings; - public AzureOpenAiModel(ModelConfigurations configurations, ModelSecrets secrets) { + public AzureOpenAiModel( + ModelConfigurations configurations, + ModelSecrets secrets, + AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings + ) { super(configurations, secrets); + + this.rateLimitServiceSettings = Objects.requireNonNull(rateLimitServiceSettings); } protected AzureOpenAiModel(AzureOpenAiModel model, TaskSettings taskSettings) { super(model, taskSettings); + this.uri = model.getUri(); + rateLimitServiceSettings = model.rateLimitServiceSettings(); } protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettings) { super(model, serviceSettings); + this.uri = model.getUri(); + rateLimitServiceSettings = model.rateLimitServiceSettings(); } public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); @@ -46,4 +58,8 @@ public URI getUri() { public void setUri(URI newUri) { this.uri = newUri; } + + public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { + return rateLimitServiceSettings; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiRateLimitServiceSettings.java new file mode 100644 index 0000000000000..9a474c9059f32 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiRateLimitServiceSettings.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai; + +public interface AzureOpenAiRateLimitServiceSettings { + String resourceName(); + + String deploymentId(); + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 4c3272013f0e2..93d1e31a3bed1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -64,7 +64,11 @@ public AzureOpenAiEmbeddingsModel( AzureOpenAiEmbeddingsTaskSettings taskSettings, @Nullable AzureOpenAiSecretSettings secrets ) { - super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); try { this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); } catch (URISyntaxException e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java index c3d9e3eb69a5d..4153aef9cd746 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java @@ -21,6 +21,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; import java.io.IOException; import java.util.Map; @@ -40,7 +41,7 @@ /** * Defines the service settings for interacting with Azure OpenAI's text embedding models.
*/ -public class AzureOpenAiEmbeddingsServiceSettings implements ServiceSettings { +public class AzureOpenAiEmbeddingsServiceSettings implements ServiceSettings, AzureOpenAiRateLimitServiceSettings { public static final String NAME = "azure_openai_embeddings_service_settings"; @@ -164,10 +165,12 @@ private AzureOpenAiEmbeddingsServiceSettings(CommonFields fields) { ); } + @Override public String resourceName() { return resourceName; } + @Override public String deploymentId() { return deploymentId; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java index 81a27e1e536f3..fdff730a83a31 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereModel.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.cohere; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -15,21 +17,38 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionVisitor; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; +import java.net.URI; import java.util.Map; public abstract class CohereModel extends Model { - public CohereModel(ModelConfigurations configurations, ModelSecrets secrets) { + private final SecureString apiKey; + + public CohereModel(ModelConfigurations configurations, ModelSecrets secrets, @Nullable ApiKeySecrets apiKeySecrets) { super(configurations, secrets); + + apiKey = ServiceUtils.apiKey(apiKeySecrets); } protected CohereModel(CohereModel model, TaskSettings taskSettings) { super(model, taskSettings); + + apiKey = model.apiKey(); } protected CohereModel(CohereModel model, ServiceSettings serviceSettings) { super(model, serviceSettings); + + apiKey = model.apiKey(); + } + + public SecureString apiKey() { + return apiKey; } public abstract ExecutableAction accept(CohereActionVisitor creator, Map taskSettings, InputType inputType); + + public abstract URI uri(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java index be25361724c1b..c1c46787a60ac 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.net.URI; import java.util.Map; public class CohereEmbeddingsModel extends CohereModel { @@ -54,7 +55,11 @@ public CohereEmbeddingsModel( CohereEmbeddingsTaskSettings taskSettings, @Nullable DefaultSecretSettings secretSettings ) { - 
super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + super( + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secretSettings), + secretSettings + ); } private CohereEmbeddingsModel(CohereEmbeddingsModel model, CohereEmbeddingsTaskSettings taskSettings) { @@ -84,4 +89,9 @@ public DefaultSecretSettings getSecretSettings() { public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings, InputType inputType) { return visitor.create(this, taskSettings, inputType); } + + @Override + public URI uri() { + return getServiceSettings().getCommonSettings().uri(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java index 78e0e419c418d..2a02cab606889 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankModel.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereModel; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.net.URI; import java.util.Map; public class CohereRerankModel extends CohereModel { @@ -54,7 +55,11 @@ public CohereRerankModel( CohereRerankTaskSettings taskSettings, @Nullable DefaultSecretSettings secretSettings ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + super( + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secretSettings), + secretSettings + ); } private CohereRerankModel(CohereRerankModel model, CohereRerankTaskSettings taskSettings) { @@ -91,4 +96,9 @@ public DefaultSecretSettings getSecretSettings() { public ExecutableAction accept(CohereActionVisitor visitor, Map taskSettings, InputType inputType) { return visitor.create(this, taskSettings); } + + @Override + public URI uri() { + return getServiceSettings().getCommonSettings().uri(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java index 82076c865fee8..7e51688dcbc13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java @@ -8,24 +8,42 @@ package org.elasticsearch.xpack.inference.services.huggingface; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; -import java.net.URI; +import java.util.Objects; 
public abstract class HuggingFaceModel extends Model { - public HuggingFaceModel(ModelConfigurations configurations, ModelSecrets secrets) { + private final HuggingFaceRateLimitServiceSettings rateLimitServiceSettings; + private final SecureString apiKey; + + public HuggingFaceModel( + ModelConfigurations configurations, + ModelSecrets secrets, + HuggingFaceRateLimitServiceSettings rateLimitServiceSettings, + @Nullable ApiKeySecrets apiKeySecrets + ) { super(configurations, secrets); + this.rateLimitServiceSettings = Objects.requireNonNull(rateLimitServiceSettings); + apiKey = ServiceUtils.apiKey(apiKeySecrets); } - public abstract ExecutableAction accept(HuggingFaceActionVisitor creator); - - public abstract URI getUri(); + public HuggingFaceRateLimitServiceSettings rateLimitServiceSettings() { + return rateLimitServiceSettings; + } - public abstract SecureString getApiKey(); + public SecureString apiKey() { + return apiKey; + } public abstract Integer getTokenLimit(); + + public abstract ExecutableAction accept(HuggingFaceActionVisitor creator); + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceRateLimitServiceSettings.java new file mode 100644 index 0000000000000..51f034275c13d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceRateLimitServiceSettings.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import java.net.URI; + +/** + * The service setting fields for hugging face that determine how to rate limit requests. 
+ */ +public interface HuggingFaceRateLimitServiceSettings { + URI uri(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index b151e9c800a74..03a02cca44d74 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -35,7 +35,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public class HuggingFaceServiceSettings implements ServiceSettings { +public class HuggingFaceServiceSettings implements ServiceSettings, HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_service_settings"; public static HuggingFaceServiceSettings fromMap(Map map) { @@ -141,6 +141,7 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override public URI uri() { return uri; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 2968566208624..8a947ce9a024b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.huggingface.elser; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -16,7 +15,6 @@ import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -import java.net.URI; import java.util.Map; public class HuggingFaceElserModel extends HuggingFaceModel { @@ -43,7 +41,12 @@ public HuggingFaceElserModel( HuggingFaceElserServiceSettings serviceSettings, @Nullable HuggingFaceElserSecretSettings secretSettings ) { - super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), new ModelSecrets(secretSettings)); + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), + new ModelSecrets(secretSettings), + serviceSettings, + secretSettings + ); } @Override @@ -61,16 +64,6 @@ public ExecutableAction accept(HuggingFaceActionVisitor creator) { return creator.create(this); } - @Override - public URI getUri() { - return getServiceSettings().uri(); - } - - @Override - public SecureString getApiKey() { - return getSecretSettings().apiKey(); - } - @Override public Integer getTokenLimit() { return getServiceSettings().maxInputTokens(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java 
index e6560a9dd1af6..48c8997f2a1bd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; import java.io.IOException; import java.util.Map; @@ -24,7 +25,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; -public record HuggingFaceElserSecretSettings(SecureString apiKey) implements SecretSettings { +public record HuggingFaceElserSecretSettings(SecureString apiKey) implements SecretSettings, ApiKeySecrets { public static final String NAME = "hugging_face_elser_secret_settings"; static final String API_KEY = "api_key"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index 6949c0917bcec..5b382919fd00f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -15,6 +15,7 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceRateLimitServiceSettings; import java.io.IOException; import java.net.URI; @@ -25,7 +26,10 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; -public record HuggingFaceElserServiceSettings(URI uri, Integer maxInputTokens) implements ServiceSettings { +public record HuggingFaceElserServiceSettings(URI uri, Integer maxInputTokens) + implements + ServiceSettings, + HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; private static final Integer ELSER_TOKEN_LIMIT = 512; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index 351173de95cc7..1cee26558b490 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.huggingface.embeddings; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -18,7 +17,6 @@ import 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; -import java.net.URI; import java.util.Map; public class HuggingFaceEmbeddingsModel extends HuggingFaceModel { @@ -46,7 +44,12 @@ public HuggingFaceEmbeddingsModel( HuggingFaceServiceSettings serviceSettings, @Nullable DefaultSecretSettings secrets ) { - super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), new ModelSecrets(secrets)); + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings), + new ModelSecrets(secrets), + serviceSettings, + secrets + ); } public HuggingFaceEmbeddingsModel(HuggingFaceEmbeddingsModel model, HuggingFaceServiceSettings serviceSettings) { @@ -69,16 +72,6 @@ public DefaultSecretSettings getSecretSettings() { return (DefaultSecretSettings) super.getSecretSettings(); } - @Override - public URI getUri() { - return getServiceSettings().uri(); - } - - @Override - public SecureString getApiKey() { - return getSecretSettings().apiKey(); - } - @Override public Integer getTokenLimit() { return getServiceSettings().maxInputTokens(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java index 1e158725f531d..caf09de31794e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiModel.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.openai; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,21 +16,49 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionVisitor; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; import java.util.Map; +import java.util.Objects; public abstract class OpenAiModel extends Model { - public OpenAiModel(ModelConfigurations configurations, ModelSecrets secrets) { + private final OpenAiRateLimitServiceSettings rateLimitServiceSettings; + private final SecureString apiKey; + + public OpenAiModel( + ModelConfigurations configurations, + ModelSecrets secrets, + OpenAiRateLimitServiceSettings rateLimitServiceSettings, + @Nullable ApiKeySecrets apiKeySecrets + ) { super(configurations, secrets); + + this.rateLimitServiceSettings = Objects.requireNonNull(rateLimitServiceSettings); + apiKey = ServiceUtils.apiKey(apiKeySecrets); } protected OpenAiModel(OpenAiModel model, TaskSettings taskSettings) { super(model, taskSettings); + + rateLimitServiceSettings = model.rateLimitServiceSettings(); + apiKey = model.apiKey(); } protected OpenAiModel(OpenAiModel model, ServiceSettings serviceSettings) { super(model, serviceSettings); + + rateLimitServiceSettings = model.rateLimitServiceSettings(); + apiKey = model.apiKey(); + } + + public SecureString apiKey() { + return apiKey; + } + + public OpenAiRateLimitServiceSettings rateLimitServiceSettings() { + return 
rateLimitServiceSettings; } public abstract ExecutableAction accept(OpenAiActionVisitor creator, Map taskSettings); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiRateLimitServiceSettings.java new file mode 100644 index 0000000000000..be7378fd83f50 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiRateLimitServiceSettings.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai; + +import java.net.URI; + +/** + * The service setting fields for openai that determine how to rate limit requests. + */ +public interface OpenAiRateLimitServiceSettings { + String modelId(); + + URI uri(); + + String organizationId(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java index 467c4f44f34fe..b1b670c0911f5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java @@ -55,7 +55,12 @@ public OpenAiChatCompletionModel( OpenAiChatCompletionTaskSettings taskSettings, @Nullable DefaultSecretSettings secrets ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + super( + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings, + secrets + ); } private OpenAiChatCompletionModel(OpenAiChatCompletionModel originalModel, OpenAiChatCompletionTaskSettings taskSettings) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 16b0ed5d47039..7a8bafb8b1091 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -17,6 +17,7 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; import java.io.IOException; import java.net.URI; @@ -36,7 +37,7 @@ /** * Defines the service settings for interacting with OpenAI's chat completion models. 
*/ -public class OpenAiChatCompletionServiceSettings implements ServiceSettings { +public class OpenAiChatCompletionServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_completion_service_settings"; @@ -94,14 +95,17 @@ public OpenAiChatCompletionServiceSettings(StreamInput in) throws IOException { this.maxInputTokens = in.readOptionalVInt(); } + @Override public String modelId() { return modelId; } + @Override public URI uri() { return uri; } + @Override public String organizationId() { return organizationId; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index e822fa069598f..18a1d8a5b658f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -58,7 +58,12 @@ public OpenAiEmbeddingsModel( OpenAiEmbeddingsTaskSettings taskSettings, @Nullable DefaultSecretSettings secrets ) { - super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings, + secrets + ); } private OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiEmbeddingsTaskSettings taskSettings) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 1e5c93ea9ae22..e3fc02580cf04 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -21,6 +21,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.OpenAiRateLimitServiceSettings; import java.io.IOException; import java.net.URI; @@ -43,7 +44,7 @@ /** * Defines the service settings for interacting with OpenAI's text embedding models. 
*/ -public class OpenAiEmbeddingsServiceSettings implements ServiceSettings { +public class OpenAiEmbeddingsServiceSettings implements ServiceSettings, OpenAiRateLimitServiceSettings { public static final String NAME = "openai_service_settings"; @@ -184,10 +185,12 @@ private OpenAiEmbeddingsServiceSettings(CommonFields fields, Boolean dimensionsS ); } + @Override public URI uri() { return uri; } + @Override public String organizationId() { return organizationId; } @@ -210,6 +213,7 @@ public Integer maxInputTokens() { return maxInputTokens; } + @Override public String modelId() { return modelId; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/ApiKeySecrets.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/ApiKeySecrets.java new file mode 100644 index 0000000000000..687a8b378b1ff --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/ApiKeySecrets.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.settings; + +import org.elasticsearch.common.settings.SecureString; + +public interface ApiKeySecrets { + SecureString apiKey(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java index 8587f254c0b4d..6affa998c089d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java @@ -28,7 +28,7 @@ * Contains secret settings that are common to all services. 
* @param apiKey the key used to authenticate with the 3rd party service */ -public record DefaultSecretSettings(SecureString apiKey) implements SecretSettings { +public record DefaultSecretSettings(SecureString apiKey) implements SecretSettings, ApiKeySecrets { public static final String NAME = "default_secret_settings"; static final String API_KEY = "api_key"; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java index da81007b7d2bd..06cae11bc8d5d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsActionTests.java @@ -352,7 +352,7 @@ private CohereEmbeddingsAction createAction( ) { var model = CohereEmbeddingsModelTests.createModel(url, apiKey, taskSettings, 1024, 1024, modelName, embeddingType); - return new CohereEmbeddingsAction(sender, model); + return new CohereEmbeddingsAction(sender, model, threadPool); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java index 9a85b00c01485..31297ed432ef5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/ExecutableRequestCreatorTests.java @@ -22,19 +22,19 @@ import static org.mockito.Mockito.when; public class ExecutableRequestCreatorTests { - public static ExecutableRequestCreator createMock() { - var mockCreator = mock(ExecutableRequestCreator.class); + public static RequestManager createMock() { + var mockCreator = mock(RequestManager.class); when(mockCreator.create(any(), anyList(), any(), any(), any(), any())).thenReturn(() -> {}); return mockCreator; } - public static ExecutableRequestCreator createMock(RequestSender requestSender) { + public static RequestManager createMock(RequestSender requestSender) { return createMock(requestSender, "id"); } - public static ExecutableRequestCreator createMock(RequestSender requestSender, String modelId) { - var mockCreator = mock(ExecutableRequestCreator.class); + public static RequestManager createMock(RequestSender requestSender, String modelId) { + var mockCreator = mock(RequestManager.class); doAnswer(invocation -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java index b7a07242c6d8d..395c046413504 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderTests.java @@ -106,7 +106,7 @@ public void testCreateSender_SendsRequestAndReceivesResponse() throws Exception PlainActionFuture listener = new PlainActionFuture<>(); sender.send( - 
OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator(getUrl(webServer), null, "key", "model", null), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator(getUrl(webServer), null, "key", "model", null, threadPool), new DocumentsOnlyInput(List.of("abc")), null, listener diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreatorTests.java index 53537a3ff77c2..37fce8d3f3a7b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiEmbeddingsExecutableRequestCreatorTests.java @@ -8,33 +8,36 @@ package org.elasticsearch.xpack.inference.external.http.sender; import org.elasticsearch.core.Nullable; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.common.TruncatorTests; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; public class OpenAiEmbeddingsExecutableRequestCreatorTests { - public static OpenAiEmbeddingsExecutableRequestCreator makeCreator( + public static OpenAiEmbeddingsRequestManager makeCreator( String url, @Nullable String org, String apiKey, String modelName, - @Nullable String user + @Nullable String user, + ThreadPool threadPool ) { var model = createModel(url, org, apiKey, modelName, user); - return new OpenAiEmbeddingsExecutableRequestCreator(model, TruncatorTests.createTruncator()); + return OpenAiEmbeddingsRequestManager.of(model, TruncatorTests.createTruncator(), threadPool); } - public static OpenAiEmbeddingsExecutableRequestCreator makeCreator( + public static OpenAiEmbeddingsRequestManager makeCreator( String url, @Nullable String org, String apiKey, String modelName, @Nullable String user, - String inferenceEntityId + String inferenceEntityId, + ThreadPool threadPool ) { var model = createModel(url, org, apiKey, modelName, user, inferenceEntityId); - return new OpenAiEmbeddingsExecutableRequestCreator(model, TruncatorTests.createTruncator()); + return OpenAiEmbeddingsRequestManager.of(model, TruncatorTests.createTruncator(), threadPool); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index 24a261dfe47c4..ff88ba221d985 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -109,7 +109,7 @@ public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { PlainActionFuture listener = new PlainActionFuture<>(); service.execute( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null, threadPool), new DocumentsOnlyInput(List.of()), null, listener @@ -179,7 +179,7 @@ public void testTaskThrowsError_CallsOnFailure() { PlainActionFuture listener = new 
PlainActionFuture<>(); service.execute( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "id", null, threadPool), new DocumentsOnlyInput(List.of()), null, listener diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index 14a7e28eb84db..13c395180cd16 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -59,7 +59,7 @@ public void testExecuting_DoesNotCallOnFailureForTimeout_AfterIllegalArgumentExc ActionListener listener = mock(ActionListener.class); var requestTask = new RequestTask( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id", threadPool), new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, @@ -79,7 +79,7 @@ public void testRequest_ReturnsTimeoutException() { PlainActionFuture listener = new PlainActionFuture<>(); var requestTask = new RequestTask( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id", threadPool), new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, @@ -105,7 +105,7 @@ public void testRequest_DoesNotCallOnFailureTwiceWhenTimingOut() throws Exceptio }).when(listener).onFailure(any()); var requestTask = new RequestTask( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id", threadPool), new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, @@ -137,7 +137,7 @@ public void testRequest_DoesNotCallOnResponseAfterTimingOut() throws Exception { }).when(listener).onFailure(any()); var requestTask = new RequestTask( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id", threadPool), new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), threadPool, @@ -167,7 +167,7 @@ public void testRequest_DoesNotCallOnFailureForTimeout_AfterAlreadyCallingOnResp ActionListener listener = mock(ActionListener.class); var requestTask = new RequestTask( - OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id"), + OpenAiEmbeddingsExecutableRequestCreatorTests.makeCreator("url", null, "key", "model", null, "id", threadPool), new DocumentsOnlyInput(List.of("abc")), TimeValue.timeValueMillis(1), mockThreadPool, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java index ab8bf244a4d2c..55965bc2354d3 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/SingleRequestManagerTests.java @@ -17,7 +17,7 @@ public class SingleRequestManagerTests extends ESTestCase { public void testExecute_DoesNotCallRequestCreatorCreate_WhenInputIsNull() { - var requestCreator = mock(ExecutableRequestCreator.class); + var requestCreator = mock(RequestManager.class); var request = mock(InferenceRequest.class); when(request.getRequestCreator()).thenReturn(requestCreator); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java index 8e7c831a9820f..88e6880b72f0b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiAccount; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; @@ -106,11 +105,8 @@ public static AzureOpenAiEmbeddingsRequest createRequest( entraId, "id" ); - var account = AzureOpenAiAccount.fromModel(embeddingsModel); - return new AzureOpenAiEmbeddingsRequest( TruncatorTests.createTruncator(), - account, new Truncator.TruncationResult(List.of(input), new boolean[] { false }), embeddingsModel ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index 32911eeb44adf..d30b809603eef 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.external.cohere.CohereAccount; import org.elasticsearch.xpack.inference.services.cohere.CohereTruncation; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; @@ -178,7 +177,6 @@ public void testCreateRequest_TruncateNone() throws IOException { } public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) { - var account = new CohereAccount(model.getServiceSettings().getCommonSettings().uri(), model.getSecretSettings().apiKey()); - return new CohereEmbeddingsRequest(account, input, model); + return new 
CohereEmbeddingsRequest(input, model); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java index 469ea28d42e5d..50fa769298f8c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java @@ -9,12 +9,10 @@ import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; import java.io.IOException; @@ -70,11 +68,9 @@ public void testIsTruncated_ReturnsTrue() throws URISyntaxException, IOException } public static HuggingFaceInferenceRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { - var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); return new HuggingFaceInferenceRequest( TruncatorTests.createTruncator(), - account, new Truncator.TruncationResult(List.of(input), new boolean[] { false }), HuggingFaceEmbeddingsModelTests.createModel(url, apiKey) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java index 7858bdf4d1259..b71508021eddd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; import java.io.IOException; @@ -120,13 +119,7 @@ public static OpenAiChatCompletionRequest createRequest( @Nullable String user ) { var chatCompletionModel = OpenAiChatCompletionModelTests.createChatCompletionModel(url, org, apiKey, model, user); - - var account = new OpenAiAccount( - chatCompletionModel.getServiceSettings().uri(), - org, - chatCompletionModel.getSecretSettings().apiKey() - ); - return new OpenAiChatCompletionRequest(account, List.of(input), chatCompletionModel); + return new OpenAiChatCompletionRequest(List.of(input), chatCompletionModel); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java index ebff1c5e096e8..935b27cfb688a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests; import java.io.IOException; @@ -118,10 +117,8 @@ public static OpenAiEmbeddingsRequest createRequest( ) { var embeddingsModel = OpenAiEmbeddingsModelTests.createModel(url, org, apiKey, model, user, (Integer) null); - var account = new OpenAiAccount(embeddingsModel.getServiceSettings().uri(), org, embeddingsModel.getSecretSettings().apiKey()); return new OpenAiEmbeddingsRequest( TruncatorTests.createTruncator(), - account, new Truncator.TruncationResult(List.of(input), new boolean[] { false }), embeddingsModel );
From ecacb2effbc48f191c2ac9f61b599626ea6cb4e3 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 16 Apr 2024 18:46:20 +0200 Subject: [PATCH 058/130] Consider total number of aggregations not number of non-empty aggregations (#107536)

Tests are failing because we are computing the doc count error for terms aggregations incorrectly. The only difference from previous versions is that we were considering the number of non-empty aggregations instead of the total number of aggregations when computing this value. Making this change makes the tests happy.
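To make the sizing bug concrete, here is a minimal sketch with hypothetical numbers (illustration only, not the actual Elasticsearch code; the real logic lives in AbstractInternalTerms.TermsAggregationReducer, shown in the diff below). When several aggregations are reduced but only one of them actually contributes buckets, keying the single-response check off the non-empty count wrongly reports a doc count error of zero.

    // Illustration only, with hypothetical numbers; not the actual Elasticsearch code.
    class DocCountErrorSketch {
        public static void main(String[] args) {
            long sumDocCountError = 5;     // accumulated per-shard doc count error
            int size = 3;                  // total aggregations the reducer was built for
            int nonEmptyAggregations = 1;  // aggregations that actually contributed buckets

            // Before the fix: one non-empty response looks like a single-aggregation reduce.
            long buggy = nonEmptyAggregations == 1 ? 0 : sumDocCountError;
            // After the fix: only a genuinely single aggregation reports no error.
            long fixed = size == 1 ? 0 : sumDocCountError;

            System.out.println("buggy=" + buggy + " fixed=" + fixed); // buggy=0 fixed=5
        }
    }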
--- .../aggregations/bucket/terms/AbstractInternalTerms.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 11bd63bcdaa8c..af1cabdc27389 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -221,6 +221,7 @@ public final AggregatorReducer termsAggregationReducer(AggregationReduceContext private class TermsAggregationReducer implements AggregatorReducer { private final List> bucketsList; private final AggregationReduceContext reduceContext; + private final int size; private long sumDocCountError = 0; private final long[] otherDocCount = new long[] { 0 }; @@ -236,6 +237,7 @@ private class TermsAggregationReducer implements AggregatorReducer { private TermsAggregationReducer(AggregationReduceContext reduceContext, int size) { bucketsList = new ArrayList<>(size); this.reduceContext = reduceContext; + this.size = size; } @Override @@ -326,7 +328,7 @@ public InternalAggregation get() { if (sumDocCountError == -1) { docCountError = -1; } else { - docCountError = bucketsList.size() == 1 ? 0 : sumDocCountError; + docCountError = size == 1 ? 0 : sumDocCountError; } return create(name, result, reduceContext.isFinalReduce() ? getOrder() : thisReduceOrder, docCountError, otherDocCount[0]); }
From e6d421c830b44caadffa6f2e4436e453cbdfb6df Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 16 Apr 2024 18:25:49 +0100 Subject: [PATCH 059/130] Add release highlight for #106133 (#107487)

This commit adds a release highlight for #106133.

--- docs/changelog/106133.yaml | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/docs/changelog/106133.yaml b/docs/changelog/106133.yaml index fe71992a0f4f0..6dd7bf6cea086 100644 --- a/docs/changelog/106133.yaml +++ b/docs/changelog/106133.yaml @@ -1,5 +1,19 @@ pr: 106133 -summary: Add an optimised vector distance function for aarch64 +summary: Add a SIMD (Neon) optimised vector distance function for int8 area: Search type: enhancement issues: [] +highlight: + title: A SIMD (Neon) optimised vector distance function for merging int8 Scalar Quantized vectors has been added + body: |- + An optimised int8 vector distance implementation for aarch64 has been added. + This implementation is currently only used during merging. + The vector distance implementation outperforms Lucene's Panama Vector + implementation for binary comparisons by approx 5x (depending on the number + of dimensions). It does so by means of SIMD (Neon) intrinsics compiled into a + separate native library and linked via Panama's FFI. Comparisons are performed on + off-heap mmap'ed vector data. + Macro benchmarks, SO_Dense_Vector with scalar quantization enabled, show + significant improvements in merge times, approximately 3 times faster. + notable: true +
From 73a17a17fe1cc80a6f436c207e3a8cc45bff9b60 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 16 Apr 2024 14:25:21 -0400 Subject: [PATCH 060/130] [ESQL] Moving argument compatibility checking for Equals (#107546)

Fixed the conflicts and re-submitted. Please see #105217 for full details, history, and discussion. I'll use the commit message from that PR as well. Continuing my work from #104490, this PR moves the parameter compatibility checking for Equals into the type resolution check. This is a somewhat bigger change than for Add, as there was no ES|QL base class for binary comparison operators before this. I've added EsqlBinaryComparison as that base class, and migrated all of the binary comparisons to be based off of that (except for NullEquals, see note below). In order to maintain compatibility with the current behavior, I've kept it so that unsigned longs are only interoperable with other unsigned longs. We've talked a lot about changing that, and I consider this work a prerequisite for that. I've also added a bunch of test cases to Equals and NotEquals, which should have the side effect of filling out the type support table in the equals docs. As noted in the comments, I'll have follow-up PRs for the other binary comparisons to add tests, but this PR is already too long. Note about NullEquals: There is an ES|QL NullEquals class, which inherits from the QL version, but I don't think it works. I didn't see any tests or docs for it, and trying it out in the demo instance gave me a syntax error. I think we need to delve into what's going on there, but this PR isn't the right place for it. This reverts commit 225edaf6076770385b4d091af89a546020ec5c79.
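The unsigned_long restriction described above, together with the rest of the argument compatibility rule, condenses to roughly the following sketch (a paraphrase of the checkCompatibility method added in the diff below, assuming the same DataType/DataTypes imports the real class uses; not the verbatim code):

    // Condensed sketch of EsqlBinaryComparison#checkCompatibility (see the diff below).
    // unsigned_long is only comparable with unsigned_long (or null); other numeric types
    // mix freely with each other, as do string types; otherwise the types must match.
    static boolean compatible(DataType left, DataType right) {
        if (left == UNSIGNED_LONG || right == UNSIGNED_LONG) {
            return (left == UNSIGNED_LONG || left == DataTypes.NULL)
                && (right == UNSIGNED_LONG || right == DataTypes.NULL);
        }
        return (left.isNumeric() && right.isNumeric())
            || (DataTypes.isString(left) && DataTypes.isString(right))
            || left.equals(right)
            || DataTypes.isNull(left)
            || DataTypes.isNull(right);
    }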
--- docs/changelog/107537.yaml | 5 - .../src/main/resources/conditional.csv-spec | 7 + .../predicate/operator/comparison/Equals.java | 47 +++-- .../comparison/EsqlBinaryComparison.java | 164 +++++++++++++++ .../operator/comparison/GreaterThan.java | 33 ++- .../comparison/GreaterThanOrEqual.java | 34 ++- .../operator/comparison/LessThan.java | 36 ++-- .../operator/comparison/LessThanOrEqual.java | 31 ++- .../operator/comparison/NotEquals.java | 82 +++++--- .../DateTimeArithmeticOperation.java | 8 +- .../arithmetic/EsqlArithmeticOperation.java | 23 +- .../function/AbstractFunctionTestCase.java | 49 +++++ .../expression/function/TestCaseSupplier.java | 92 +++++++- .../operator/arithmetic/AddTests.java | 8 +- .../operator/arithmetic/DivTests.java | 18 +- .../operator/arithmetic/ModTests.java | 18 +- .../operator/comparison/EqualsTests.java | 198 +++++++++++++++--- .../comparison/GreaterThanOrEqualTests.java | 21 +- .../operator/comparison/GreaterThanTests.java | 21 +- .../comparison/LessThanOrEqualTests.java | 20 +- .../operator/comparison/LessThanTests.java | 20 +- .../operator/comparison/NotEqualsTests.java | 197 ++++++++++++++--- .../esql/optimizer/OptimizerRulesTests.java | 2 +- 23 files changed, 872 insertions(+), 262 deletions(-) delete mode 100644 docs/changelog/107537.yaml create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java diff --git a/docs/changelog/107537.yaml b/docs/changelog/107537.yaml deleted file mode 100644 index d6d502b394c3b..0000000000000 --- a/docs/changelog/107537.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 107537 -summary: "Revert \"[ES|QL] Moving argument compatibility checking for Equals\"" -area: ES|QL -type: bug -issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index f574722f691e5..64a8c1d9da316 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -156,6 +156,9 @@ nullOnMultivaluesComparisonOperation required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; +warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value + a:integer | b:integer | same:boolean 5 | [1, 2] | null @@ -166,6 +169,8 @@ notNullOnMultivaluesComparisonOperation required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; +warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value a:integer | b:integer | same:boolean ; @@ -175,6 +180,8 @@ notNullOnMultivaluesComparisonOperationWithPartialMatch required_feature: esql.disable_nullable_opts ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; +warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value a:integer | b:integer | same:boolean ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java index 9fb899b8e36df..62eec13af008a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/Equals.java @@ -8,33 +8,48 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; + +public class Equals extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.BOOLEAN, EqualsBoolsEvaluator.Factory::new), + Map.entry(DataTypes.INTEGER, EqualsIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, EqualsDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, EqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, EqualsLongsEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_POINT, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, EqualsGeometriesEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, EqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, EqualsKeywordsEvaluator.Factory::new) + ); -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; - -public class Equals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals { public Equals(Source source, Expression left, Expression right) { - super(source, left, right); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, evaluatorMap); } public Equals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); - } - - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.EQ, 
zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, Equals::new, left(), right(), zoneId()); } @@ -48,6 +63,11 @@ public Equals swapLeftAndRight() { return new Equals(source(), right(), left(), zoneId()); } + @Override + public BinaryComparison reverse() { + return this; + } + @Override public BinaryComparison negate() { return new NotEquals(source(), left(), right(), zoneId()); @@ -82,4 +102,5 @@ static boolean processBools(boolean lhs, boolean rhs) { static boolean processGeometries(BytesRef lhs, BytesRef rhs) { return lhs.equals(rhs); } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java new file mode 100644 index 0000000000000..58a808893c4c6 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; + +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.time.ZoneId; +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; + +public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { + + private final Map evaluatorMap; + + protected EsqlBinaryComparison( + Source source, + Expression left, + Expression right, + /* TODO: BinaryComparisonOperator is an enum with a bunch of functionality we don't really want. We should extract an interface and + create a symbol only version like we did for BinaryArithmeticOperation. Ideally, they could be the same class. 
+ */ + BinaryComparisonProcessor.BinaryComparisonOperation operation, + Map evaluatorMap + ) { + this(source, left, right, operation, null, evaluatorMap); + } + + protected EsqlBinaryComparison( + Source source, + Expression left, + Expression right, + BinaryComparisonProcessor.BinaryComparisonOperation operation, + // TODO: We are definitely not doing the right thing with this zoneId + ZoneId zoneId, + Map evaluatorMap + ) { + super(source, left, right, operation, zoneId); + this.evaluatorMap = evaluatorMap; + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + // Our type is always boolean, so figure out the evaluator type from the inputs + DataType commonType = EsqlDataTypeRegistry.INSTANCE.commonType(left().dataType(), right().dataType()); + EvalOperator.ExpressionEvaluator.Factory lhs; + EvalOperator.ExpressionEvaluator.Factory rhs; + + if (commonType.isNumeric()) { + lhs = Cast.cast(source(), left().dataType(), commonType, toEvaluator.apply(left())); + rhs = Cast.cast(source(), right().dataType(), commonType, toEvaluator.apply(right())); + } else { + lhs = toEvaluator.apply(left()); + rhs = toEvaluator.apply(right()); + } + + if (evaluatorMap.containsKey(commonType) == false) { + throw new EsqlIllegalArgumentException("Unsupported type " + left().dataType()); + } + return evaluatorMap.get(commonType).apply(source(), lhs, rhs); + } + + @Override + public Boolean fold() { + return (Boolean) EvaluatorMapper.super.fold(); + } + + @Override + protected TypeResolution resolveType() { + TypeResolution typeResolution = super.resolveType(); + if (typeResolution.unresolved()) { + return typeResolution; + } + + return checkCompatibility(); + } + + @Override + protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { + return TypeResolutions.isType( + e, + evaluatorMap::containsKey, + sourceText(), + paramOrdinal, + evaluatorMap.keySet().stream().map(DataType::typeName).toArray(String[]::new) + ); + } + + /** + * Check if the two input types are compatible for this operation + * + * @return TypeResolution.TYPE_RESOLVED iff the types are compatible. Otherwise, an appropriate type resolution error. + */ + protected TypeResolution checkCompatibility() { + DataType leftType = left().dataType(); + DataType rightType = right().dataType(); + + // Unsigned long is only interoperable with other unsigned longs + if ((rightType == UNSIGNED_LONG && (false == (leftType == UNSIGNED_LONG || leftType == DataTypes.NULL))) + || (leftType == UNSIGNED_LONG && (false == (rightType == UNSIGNED_LONG || rightType == DataTypes.NULL)))) { + return new TypeResolution(formatIncompatibleTypesMessage()); + } + + if ((leftType.isNumeric() && rightType.isNumeric()) + || (DataTypes.isString(leftType) && DataTypes.isString(rightType)) + || leftType.equals(rightType) + || DataTypes.isNull(leftType) + || DataTypes.isNull(rightType)) { + return TypeResolution.TYPE_RESOLVED; + } + return new TypeResolution(formatIncompatibleTypesMessage()); + } + + public String formatIncompatibleTypesMessage() { + if (left().dataType().equals(UNSIGNED_LONG)) { + return format( + null, + "first argument of [{}] is [unsigned_long] and second is [{}]. " + + "[unsigned_long] can only be operated on together with another [unsigned_long]", + sourceText(), + right().dataType().typeName() + ); + } + if (right().dataType().equals(UNSIGNED_LONG)) { + return format( + null, + "first argument of [{}] is [{}] and second is [unsigned_long]. 
" + + "[unsigned_long] can only be operated on together with another [unsigned_long]", + sourceText(), + left().dataType().typeName() + ); + } + return format( + null, + "first argument of [{}] is [{}] so second argument must also be [{}] but was [{}]", + sourceText(), + left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), + left().dataType().isNumeric() ? "numeric" : left().dataType().typeName(), + right().dataType().typeName() + ); + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java index 5683a9d0d7e85..3eca0e858acbf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThan.java @@ -8,29 +8,42 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class GreaterThan extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, GreaterThanIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, GreaterThanDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, GreaterThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, GreaterThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, GreaterThanKeywordsEvaluator.Factory::new) + ); -public class GreaterThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThan { - public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + public GreaterThan(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GT, evaluatorMap); } - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + public GreaterThan(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, 
BinaryComparisonProcessor.BinaryComparisonOperation.GT, zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, GreaterThan::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java index ebb29998fb995..f99a85420870b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqual.java @@ -8,30 +8,42 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class GreaterThanOrEqual extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, GreaterThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, GreaterThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, GreaterThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, GreaterThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, GreaterThanOrEqualKeywordsEvaluator.Factory::new) + ); -public class GreaterThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.GreaterThanOrEqual { - - public GreaterThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + public GreaterThanOrEqual(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, evaluatorMap); } - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); + public GreaterThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.GTE, zoneId, evaluatorMap); } @Override - protected NodeInfo info() { + protected 
NodeInfo info() { return NodeInfo.create(this, GreaterThanOrEqual::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java index 12f54270b65dc..6b82df1d67da6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThan.java @@ -8,38 +8,44 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class LessThan extends EsqlBinaryComparison implements Negatable { -public class LessThan extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, LessThanIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, LessThanDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, LessThanLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, LessThanKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, LessThanKeywordsEvaluator.Factory::new) + ); public LessThan(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LT, zoneId, evaluatorMap); } @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, LessThan::new, left(), right(), zoneId()); } @Override - protected org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThan replaceChildren( - Expression newLeft, - Expression newRight - ) { + protected LessThan replaceChildren(Expression newLeft, Expression newRight) { return new LessThan(source(), newLeft, newRight, zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java index e75733a9e2340..ac6a92aaf097b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqual.java @@ -8,29 +8,38 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class LessThanOrEqual extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.INTEGER, LessThanOrEqualIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, LessThanOrEqualDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, LessThanOrEqualLongsEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, LessThanOrEqualKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, LessThanOrEqualKeywordsEvaluator.Factory::new) + ); -public class LessThanOrEqual extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.LessThanOrEqual { public LessThanOrEqual(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.LTE, zoneId, evaluatorMap); } @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, LessThanOrEqual::new, left(), right(), zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java index 6fbed572cdc01..9c931ec7433eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEquals.java @@ -8,45 +8,44 @@ import org.apache.lucene.util.BytesRef; 
import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.predicate.Negatable; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.time.ZoneId; +import java.util.Map; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; +public class NotEquals extends EsqlBinaryComparison implements Negatable { + private static final Map evaluatorMap = Map.ofEntries( + Map.entry(DataTypes.BOOLEAN, NotEqualsBoolsEvaluator.Factory::new), + Map.entry(DataTypes.INTEGER, NotEqualsIntsEvaluator.Factory::new), + Map.entry(DataTypes.DOUBLE, NotEqualsDoublesEvaluator.Factory::new), + Map.entry(DataTypes.LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.UNSIGNED_LONG, NotEqualsLongsEvaluator.Factory::new), + Map.entry(DataTypes.DATETIME, NotEqualsLongsEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_POINT, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.GEO_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(EsqlDataTypes.CARTESIAN_SHAPE, NotEqualsGeometriesEvaluator.Factory::new), + Map.entry(DataTypes.KEYWORD, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.TEXT, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.VERSION, NotEqualsKeywordsEvaluator.Factory::new), + Map.entry(DataTypes.IP, NotEqualsKeywordsEvaluator.Factory::new) + ); -public class NotEquals extends org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NotEquals { - public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { - super(source, left, right, zoneId); - } - - @Override - protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrdinal paramOrdinal) { - return EsqlTypeResolutions.isExact(e, sourceText(), DEFAULT); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); - } - - @Override - protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { - return new NotEquals(source(), newLeft, newRight, zoneId()); + public NotEquals(Source source, Expression left, Expression right) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, evaluatorMap); } - @Override - public NotEquals swapLeftAndRight() { - return new NotEquals(source(), right(), left(), zoneId()); - } - - @Override - public BinaryComparison negate() { - return new Equals(source(), left(), right(), zoneId()); + public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId) { + super(source, left, right, BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, zoneId, evaluatorMap); } @Evaluator(extraName = "Ints") @@ -78,4 +77,29 @@ static boolean 
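/*
 * Context for the reverse()/negate()/swapLeftAndRight() overrides added below, stated
 * as algebraic identities (an editorial summary, not patch code):
 *
 *   NOT (a != b)  ->  a == b   // negate() returns an Equals
 *   a != b        ->  b != a   // swapLeftAndRight() flips the operands
 *   reverse()     ->  this     // != is symmetric, so there is nothing to flip
 *
 * The optimizer leans on these identities when propagating and simplifying predicates,
 * as exercised by the OptimizerRulesTests change further down.
 */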
processBools(boolean lhs, boolean rhs) { static boolean processGeometries(BytesRef lhs, BytesRef rhs) { return false == lhs.equals(rhs); } + + @Override + public BinaryComparison reverse() { + return this; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); + } + + @Override + protected NotEquals replaceChildren(Expression newLeft, Expression newRight) { + return new NotEquals(source(), newLeft, newRight, zoneId()); + } + + @Override + public NotEquals swapLeftAndRight() { + return new NotEquals(source(), right(), left(), zoneId()); + } + + @Override + public BinaryComparison negate() { + return new Equals(source(), left(), right(), zoneId()); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 0f550862ed1fa..a45707a0197d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -43,10 +43,10 @@ interface DatetimeArithmeticEvaluator { Expression left, Expression right, OperationSymbol op, - ArithmeticEvaluator ints, - ArithmeticEvaluator longs, - ArithmeticEvaluator ulongs, - ArithmeticEvaluator doubles, + BinaryEvaluator ints, + BinaryEvaluator longs, + BinaryEvaluator ulongs, + BinaryEvaluator doubles, DatetimeArithmeticEvaluator datetimes ) { super(source, left, right, op, ints, longs, ulongs, doubles); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 22f5798e5b1c4..ba283bc4d877b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -71,14 +71,15 @@ public String symbol() { } /** Arithmetic (quad) function. 
*/ - interface ArithmeticEvaluator { + @FunctionalInterface + public interface BinaryEvaluator { ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory lhs, ExpressionEvaluator.Factory rhs); } - private final ArithmeticEvaluator ints; - private final ArithmeticEvaluator longs; - private final ArithmeticEvaluator ulongs; - private final ArithmeticEvaluator doubles; + private final BinaryEvaluator ints; + private final BinaryEvaluator longs; + private final BinaryEvaluator ulongs; + private final BinaryEvaluator doubles; private DataType dataType; @@ -87,10 +88,10 @@ interface ArithmeticEvaluator { Expression left, Expression right, OperationSymbol op, - ArithmeticEvaluator ints, - ArithmeticEvaluator longs, - ArithmeticEvaluator ulongs, - ArithmeticEvaluator doubles + BinaryEvaluator ints, + BinaryEvaluator longs, + BinaryEvaluator ulongs, + BinaryEvaluator doubles ) { super(source, left, right, op); this.ints = ints; @@ -139,7 +140,7 @@ protected TypeResolution checkCompatibility() { return TypeResolution.TYPE_RESOLVED; } - static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { + public static String formatIncompatibleTypesMessage(String symbol, DataType leftType, DataType rightType) { return format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol, leftType.typeName(), rightType.typeName()); } @@ -152,7 +153,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function errorsForCasesWithoutExamples( return suppliers; } + public static String errorMessageStringForBinaryOperators( + boolean includeOrdinal, + List> validPerPosition, + List types + ) { + try { + return typeErrorMessage(includeOrdinal, validPerPosition, types); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is from the combination + if (types.get(0).equals(DataTypes.UNSIGNED_LONG)) { + return "first argument of [] is [unsigned_long] and second is [" + + types.get(1).typeName() + + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + + } + if (types.get(1).equals(DataTypes.UNSIGNED_LONG)) { + return "first argument of [] is [" + + types.get(0).typeName() + + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + } + return "first argument of [] is [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] so second argument must also be [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] but was [" + + types.get(1).typeName() + + "]"; + + } + } + /** * Adds test cases containing unsupported parameter types that immediately fail. 
*/ @@ -931,6 +962,24 @@ protected static String typeErrorMessage(boolean includeOrdinal, List types) { return types.stream().map(t -> "<" + t.typeName() + ">").collect(Collectors.joining(", ")); } + public static List stringCases( + BinaryOperator expected, + BiFunction evaluatorToString, + List warnings, + DataType expectedType + ) { + List lhsSuppliers = new ArrayList<>(); + List rhsSuppliers = new ArrayList<>(); + List suppliers = new ArrayList<>(); + for (DataType type : AbstractConvertFunction.STRING_TYPES) { + lhsSuppliers.addAll(stringCases(type)); + rhsSuppliers.addAll(stringCases(type)); + casesCrossProduct( + expected, + lhsSuppliers, + rhsSuppliers, + evaluatorToString, + (lhs, rhs) -> warnings, + suppliers, + expectedType, + true + ); + } + return suppliers; + } + @Override public TestCase get() { TestCase supplied = supplier.get(); @@ -258,14 +284,14 @@ public static List castToDoubleSuppliersFromRange(Double Min, return suppliers; } - public record NumericTypeTestConfig(Number min, Number max, BinaryOperator expected, String evaluatorName) {} + public record NumericTypeTestConfig(Number min, Number max, BiFunction expected, String evaluatorName) {} - public record NumericTypeTestConfigs( - NumericTypeTestConfig intStuff, - NumericTypeTestConfig longStuff, - NumericTypeTestConfig doubleStuff + public record NumericTypeTestConfigs( + NumericTypeTestConfig intStuff, + NumericTypeTestConfig longStuff, + NumericTypeTestConfig doubleStuff ) { - public NumericTypeTestConfig get(DataType type) { + public NumericTypeTestConfig get(DataType type) { if (type == DataTypes.INTEGER) { return intStuff; } @@ -312,8 +338,47 @@ public static List getSuppliersForNumericType(DataType type, throw new IllegalArgumentException("bogus numeric type [" + type + "]"); } + public static List forBinaryComparisonWithWidening( + NumericTypeTestConfigs typeStuff, + String lhsName, + String rhsName, + BiFunction> warnings, + boolean allowRhsZero + ) { + List suppliers = new ArrayList<>(); + List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + + for (DataType lhsType : numericTypes) { + for (DataType rhsType : numericTypes) { + DataType expected = widen(lhsType, rhsType); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhs, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhs, expected) + + "]"; + casesCrossProduct( + (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), + getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), + getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max(), allowRhsZero), + evaluatorToString, + warnings, + suppliers, + DataTypes.BOOLEAN, + true + ); + } + } + return suppliers; + } + public static List forBinaryWithWidening( - NumericTypeTestConfigs typeStuff, + NumericTypeTestConfigs typeStuff, String lhsName, String rhsName, BiFunction> warnings, @@ -325,7 +390,7 @@ public static List forBinaryWithWidening( for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { DataType expected = widen(lhsType, rhsType); - NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + "[" + 
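/*
 * What forBinaryComparisonWithWidening generates, sketched with Equals as the example
 * operation (evaluator strings are illustrative, not captured test output): for each
 * ordered pair of numeric types it picks the wider type, wraps each side in a cast
 * evaluator (a no-op when the type already matches), and expects the widened type's
 * evaluator:
 *
 *   (int, int)     -> EqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]
 *   (int, long)    -> EqualsLongsEvaluator[lhs=<int-to-long cast of channel 0>, rhs=Attribute[channel=1]]
 *   (long, double) -> EqualsDoublesEvaluator[lhs=<long-to-double cast of channel 0>, rhs=Attribute[channel=1]]
 */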
lhsName @@ -885,7 +950,7 @@ public static List doubleCases(double min, double max, boolea return cases; } - private static List booleanCases() { + public static List booleanCases() { return List.of( new TypedDataSupplier("", () -> true, DataTypes.BOOLEAN), new TypedDataSupplier("", () -> false, DataTypes.BOOLEAN) @@ -1267,9 +1332,14 @@ public Matcher evaluatorToString() { * exists because we can't generate random values from the test parameter generation functions, and instead need to return * suppliers which generate the random values at test execution time. */ - public record TypedDataSupplier(String name, Supplier supplier, DataType type) { + public record TypedDataSupplier(String name, Supplier supplier, DataType type, boolean forceLiteral) { + + public TypedDataSupplier(String name, Supplier supplier, DataType type) { + this(name, supplier, type, false); + } + public TypedData get() { - return new TypedData(supplier.get(), type, name); + return new TypedData(supplier.get(), type, name, forceLiteral); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index c40d037890d53..2596959c449db 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -43,20 +43,20 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); suppliers.addAll( TestCaseSupplier.forBinaryWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> l.intValue() + r.intValue(), "AddIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> l.longValue() + r.longValue(), "AddLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> l.doubleValue() + r.doubleValue(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 528324f07a086..f3348ab2dcba5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -34,20 +34,20 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); suppliers.addAll( TestCaseSupplier.forBinaryWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> l.intValue() / r.intValue(), "DivIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, 
(Long.MAX_VALUE >> 1) - 1, (l, r) -> l.longValue() / r.longValue(), "DivLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> { + new TestCaseSupplier.NumericTypeTestConfig<>(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> { double v = l.doubleValue() / r.doubleValue(); if (Double.isFinite(v)) { return v; @@ -90,20 +90,20 @@ public static Iterable parameters() { suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), DivTests::divErrorMessageString); // Divide by zero cases - all of these should warn and return null - TestCaseSupplier.NumericTypeTestConfigs typeStuff = new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + TestCaseSupplier.NumericTypeTestConfigs typeStuff = new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> null, "DivIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> null, "DivLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> null, @@ -115,7 +115,7 @@ public static Iterable parameters() { for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); - TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + "[" + "lhs" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index d2af83e91ec64..a70f2c7885257 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -34,20 +34,20 @@ public static Iterable parameters() { List suppliers = new ArrayList<>(); suppliers.addAll( TestCaseSupplier.forBinaryWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> l.intValue() % r.intValue(), "ModIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> l.longValue() % r.longValue(), "ModLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> l.doubleValue() % r.doubleValue(), @@ -77,20 +77,20 @@ public static Iterable parameters() { suppliers = errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), ModTests::modErrorMessageString); // Divide by zero cases - all of these should warn and return null - TestCaseSupplier.NumericTypeTestConfigs typeStuff = new 
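/*
 * Why the expected value is null in these divide-by-zero configs: as with the
 * multivalued comparisons in conditional.csv-spec at the top of this patch, a failing
 * row does not fail the query; the engine records a warning and emits null. Roughly
 * (warning text abridged and assumed, not captured output):
 *
 *   ROW a = 1 | EVAL b = a % 0
 *   // warning: evaluation of [a % 0] failed, treating result as null ...
 *   // a:integer | b:integer
 *   // 1         | null
 */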
TestCaseSupplier.NumericTypeTestConfigs( - new TestCaseSupplier.NumericTypeTestConfig( + TestCaseSupplier.NumericTypeTestConfigs typeStuff = new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( (Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1, (l, r) -> null, "ModIntsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( (Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1, (l, r) -> null, "ModLongsEvaluator" ), - new TestCaseSupplier.NumericTypeTestConfig( + new TestCaseSupplier.NumericTypeTestConfig<>( Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, (l, r) -> null, @@ -102,7 +102,7 @@ public static Iterable parameters() { for (DataType lhsType : numericTypes) { for (DataType rhsType : numericTypes) { DataType expected = TestCaseSupplier.widen(lhsType, rhsType); - TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + TestCaseSupplier.NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); BiFunction evaluatorToString = (lhs, rhs) -> expectedTypeStuff.evaluatorName() + "[" + "lhs" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 0a1e9bdfaf34b..0739cd4670c08 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -11,52 +11,198 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class EqualsTests extends AbstractBinaryComparisonTestCase { +public class EqualsTests extends AbstractFunctionTestCase { public EqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int == Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() == r.intValue(), + "EqualsIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE 
>> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() == r.longValue(), + "EqualsLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() == r.doubleValue(), + "EqualsDoublesEvaluator" + ) ), - "EqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsLongsEvaluator", + "lhs", + "rhs", + Object::equals, DataTypes.BOOLEAN, - equalTo(lhs == rhs) - ); - }))); - } + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsBoolsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.booleanCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsKeywordsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsKeywordsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? 
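/*
 * On the TODO above: no explicit cast should be needed because ESQL represents
 * DATETIME values as epoch-millisecond longs at the block level, so
 * EqualsLongsEvaluator compares them directly -- which is exactly what the
 * Map.entry(DataTypes.DATETIME, EqualsLongsEvaluator.Factory::new) binding in Equals
 * relies on. (A reading of the code above, not an authoritative answer.)
 */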
+ suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsLongsEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); - @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.equals(rhs)); - } + suppliers.addAll( + TestCaseSupplier.stringCases( + Object::equals, + (lhsType, rhsType) -> "EqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new Equals(source, lhs, rhs); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.geoPointCases(), + TestCaseSupplier.geoPointCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.geoShapeCases(), + TestCaseSupplier.geoShapeCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianPointCases(), + TestCaseSupplier.cartesianPointCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "EqualsGeometriesEvaluator", + "lhs", + "rhs", + Object::equals, + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianShapeCases(), + TestCaseSupplier.cartesianShapeCases(), + List.of(), + false + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override - protected boolean isEquality() { - return true; + protected Expression build(Source source, List args) { + return new Equals(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index ad8dba7d63065..f45dedff837c4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -11,26 +11,25 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanOrEqualTests extends AbstractBinaryComparisonTestCase { +public class GreaterThanOrEqualTests extends 
AbstractFunctionTestCase { public GreaterThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { + // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int >= Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -47,17 +46,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) >= 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new GreaterThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new GreaterThanOrEqual(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index b631a742f7885..e872af5b7c772 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -11,26 +11,25 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class GreaterThanTests extends AbstractBinaryComparisonTestCase { +public class GreaterThanTests extends AbstractFunctionTestCase { public GreaterThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { + // ToDo: Add the full set of typed test cases here return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int > Int", () -> { int rhs = randomInt(); int lhs = randomInt(); @@ -47,17 +46,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) > 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new GreaterThan(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new GreaterThan(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 
7864a0dda9fe3..8bba0c4a5afb5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -11,20 +11,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanOrEqualTests extends AbstractBinaryComparisonTestCase { +public class LessThanOrEqualTests extends AbstractFunctionTestCase { public LessThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,17 +45,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(lhs.compareTo(rhs) <= 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new LessThanOrEqual(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new LessThanOrEqual(source, args.get(0), args.get(1), null); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index 826e88551077d..ab726dc51fbe4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -11,20 +11,18 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; import java.util.List; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -public class LessThanTests extends AbstractBinaryComparisonTestCase { +public class LessThanTests extends AbstractFunctionTestCase { public LessThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,17 +45,7 @@ public static Iterable parameters() { } @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return 
equalTo(lhs.compareTo(rhs) < 0); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new LessThan(source, lhs, rhs, ZoneOffset.UTC); - } - - @Override - protected boolean isEquality() { - return false; + protected Expression build(Source source, List args) { + return new LessThan(source, args.get(0), args.get(1), null); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index 0d6bb32fe2488..ec5d2338adae2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -11,53 +11,192 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.hamcrest.Matcher; -import java.time.ZoneOffset; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - -public class NotEqualsTests extends AbstractBinaryComparisonTestCase { +public class NotEqualsTests extends AbstractFunctionTestCase { public NotEqualsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int != Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() != r.intValue(), + "NotEqualsIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() != r.longValue(), + "NotEqualsLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() != r.doubleValue(), + "NotEqualsDoublesEvaluator" + ) ), - "NotEqualsIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + 
"NotEqualsLongsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), DataTypes.BOOLEAN, - equalTo(lhs != rhs) - ); - }))); - } - - @Override - protected > Matcher resultMatcher(T lhs, T rhs) { - return equalTo(false == lhs.equals(rhs)); - } - - @Override - protected BinaryComparison build(Source source, Expression lhs, Expression rhs) { - return new NotEquals(source, lhs, rhs, ZoneOffset.UTC); + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), + List.of(), + true + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsBoolsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.booleanCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsLongsEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> false == l.equals(r), + (lhsType, rhsType) -> "NotEqualsKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.geoPointCases(), + TestCaseSupplier.geoPointCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.geoShapeCases(), + TestCaseSupplier.geoShapeCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianPointCases(), + TestCaseSupplier.cartesianPointCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "NotEqualsGeometriesEvaluator", + "lhs", + "rhs", + (l, r) -> false == l.equals(r), + DataTypes.BOOLEAN, + TestCaseSupplier.cartesianShapeCases(), + TestCaseSupplier.cartesianShapeCases(), + List.of(), + false + ) + ); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override - protected boolean isEquality() { - return true; + protected Expression build(Source source, List args) { + return new NotEquals(source, args.get(0), args.get(1)); } } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 01fcd222a5141..28944252191be 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -495,7 +495,7 @@ public void testPropagateEquals_VarEq2OrVarNeq5() { // a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { FieldAttribute fa = getFieldAttribute(); - org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals eq = equalsOf(fa, TWO); + Equals eq = equalsOf(fa, TWO); Range range = rangeOf(fa, THREE, false, FOUR, false); GreaterThan gt = greaterThanOf(fa, TWO); NotEquals neq = notEqualsOf(fa, TWO); From 05a25bfd3c6021a603173d941fa6997259889de2 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 16 Apr 2024 20:41:59 +0200 Subject: [PATCH 061/130] Avoid allocating 1k buffers for tiny slices/files in BlobCacheBufferedIndexInput (#107461) Looking through some real-world heap dumps, there are at times a lot of instances of these things that have a 1k buffer allocated but are only a couple of bytes in length. We can save tens of MB in examined cases by just sizing the buffer smarter here. --- .../common/BlobCacheBufferedIndexInput.java | 19 ++++++++++++++----- .../input/DirectBlobContainerIndexInput.java | 9 +-------- .../input/MetadataCachingIndexInput.java | 11 ++--------- 3 files changed, 17 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 71ee6da24cdc7..cc193e8e2cfee 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -42,6 +42,8 @@ public abstract class BlobCacheBufferedIndexInput extends IndexInput implements // LUCENE-888 for details.
public static final int MERGE_BUFFER_SIZE = 4096; + private final long length; + private final int bufferSize; protected ByteBuffer buffer = EMPTY_BYTEBUFFER; @@ -56,15 +58,22 @@ public final byte readByte() throws IOException { return buffer.get(); } - public BlobCacheBufferedIndexInput(String resourceDesc, IOContext context) { - this(resourceDesc, bufferSize(context)); + public BlobCacheBufferedIndexInput(String resourceDesc, IOContext context, long length) { + this(resourceDesc, bufferSize(context), length); } /** Inits BufferedIndexInput with a specific bufferSize */ - public BlobCacheBufferedIndexInput(String resourceDesc, int bufferSize) { + public BlobCacheBufferedIndexInput(String resourceDesc, int bufferSize, long length) { super(resourceDesc); - checkBufferSize(bufferSize); - this.bufferSize = bufferSize; + int bufSize = Math.max(MIN_BUFFER_SIZE, (int) Math.min(bufferSize, length)); + checkBufferSize(bufSize); + this.bufferSize = bufSize; + this.length = length; + } + + @Override + public final long length() { + return length; } public int getBufferSize() { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java index 37b5fd5c14a95..ecc6588ffdf52 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java @@ -72,7 +72,6 @@ public final class DirectBlobContainerIndexInput extends BlobCacheBufferedIndexI private final FileInfo fileInfo; private final IndexInputStats stats; private final long offset; - private final long length; // the following are only mutable so they can be adjusted after cloning/slicing private volatile boolean isClone; @@ -101,7 +100,7 @@ private DirectBlobContainerIndexInput( long length, long sequentialReadSize ) { - super(name, bufferSize); // TODO should use blob cache + super(name, bufferSize, length); // TODO should use blob cache this.position = position; assert sequentialReadSize >= 0; this.sequentialReadSize = sequentialReadSize; @@ -111,7 +110,6 @@ private DirectBlobContainerIndexInput( : "this method should only be used with blobs that are NOT stored in metadata's hash field " + "(fileInfo: " + fileInfo + ')'; this.stats = Objects.requireNonNull(stats); this.offset = offset; - this.length = length; this.closed = new AtomicBoolean(false); this.isClone = false; } @@ -332,11 +330,6 @@ public void close() throws IOException { } } - @Override - public long length() { - return length; - } - @Override public String toString() { return super.toString() + "[read seq=" + (streamForSequentialReads != null ? 
"yes" : "no") + ']'; diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index ff8633bdaad17..cd9aa5aec74cb 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -100,7 +100,6 @@ public abstract class MetadataCachingIndexInput extends BlobCacheBufferedIndexIn protected final IOContext context; protected final IndexInputStats stats; private final long offset; - private final long length; // the following are only mutable so they can be adjusted after cloning/slicing private volatile boolean isClone; @@ -122,7 +121,7 @@ protected MetadataCachingIndexInput( ByteRange headerBlobCacheByteRange, ByteRange footerBlobCacheByteRange ) { - super(name, context); + super(name, context, length); this.isCfs = IndexFileNames.matchesExtension(name, "cfs"); this.logger = Objects.requireNonNull(logger); this.fileInfo = Objects.requireNonNull(fileInfo); @@ -131,7 +130,6 @@ protected MetadataCachingIndexInput( : "this method should only be used with blobs that are NOT stored in metadata's hash field " + "(fileInfo: " + fileInfo + ')'; this.stats = Objects.requireNonNull(stats); this.offset = offset; - this.length = length; this.closed = new AtomicBoolean(false); this.isClone = false; this.directory = Objects.requireNonNull(directory); @@ -160,7 +158,7 @@ protected MetadataCachingIndexInput(MetadataCachingIndexInput input) { input.stats, input.offset, input.compoundFileOffset, - input.length, + input.length(), input.cacheFileReference, input.defaultRangeSize, input.recoveryRangeSize, @@ -645,11 +643,6 @@ public final void close() throws IOException { } } - @Override - public final long length() { - return length; - } - @Override public MetadataCachingIndexInput clone() { final MetadataCachingIndexInput clone = (MetadataCachingIndexInput) super.clone(); From 75badc2f2f40d29da0ac15d1ebb1eb10a4851b89 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 16 Apr 2024 20:48:53 +0200 Subject: [PATCH 062/130] Fix IpScriptFieldType CI - #106900 (#107440) I've been tracing the problems with these tests (as the [first attempt I made](https://github.com/elastic/elasticsearch/pull/107066) was unrelated to the actual bug). I discovered that the actual problem was that the `BytesRefHash` for `terms` in the `IpScriptFieldTermsQuery` was not finding terms that were actually there. The seed that was used to reproduce this failure was triggering multiple slices for performing the search. As `BytesRefHash` is not a threadsafe class, that made me think about some kind of synchronization issue with the underlying `BytesRefHash` structure for the `IpScriptFieldTermsQuery` Adding a `synchronized` block to the `terms` on access removed the problem. I've tried to reproduce the issue with > 90k iterations of the tests and have been unable to reproduce it. 
Closes #106900 --- .../common/util/BytesRefHash.java | 30 +++++++++++++++- .../runtime/AbstractIpScriptFieldQuery.java | 2 +- .../runtime/IpScriptFieldTermsQuery.java | 34 +++++++++++++++++-- .../index/mapper/IpScriptFieldTypeTests.java | 1 - .../runtime/IpScriptFieldTermsQueryTests.java | 30 ++++++++++++---- 5 files changed, 84 insertions(+), 13 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java index 8ebc920af961b..9e0d14d6bfb27 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefHash.java @@ -133,10 +133,14 @@ public BytesRef get(long id, BytesRef dest) { * Get the id associated with key */ public long find(BytesRef key, int code) { + return find(key, code, spare); + } + + private long find(BytesRef key, int code, BytesRef intermediate) { final long slot = slot(rehash(code), mask); for (long index = slot;; index = nextSlot(index, mask)) { final long id = id(index); - if (id == -1L || key.bytesEquals(get(id, spare))) { + if (id == -1L || key.bytesEquals(get(id, intermediate))) { return id; } } @@ -147,6 +151,15 @@ public long find(BytesRef key) { return find(key, key.hashCode()); } + /** + * Allows finding a key in the hash in a thread-safe manner, by providing an intermediate + * BytesRef reference for storing intermediate results. As long as each thread provides + * its own intermediate instance, this method is thread safe. + */ + private long threadSafeFind(BytesRef key, BytesRef intermediate) { + return find(key, key.hashCode(), intermediate); + } + private long set(BytesRef key, int code, long id) { assert rehash(key.hashCode()) == code; assert size < maxSize; @@ -236,4 +249,19 @@ public long ramBytesUsed() { return BASE_RAM_BYTES_USED + bytesRefs.ramBytesUsed() + ids.ramBytesUsed() + hashes.ramBytesUsed() + spare.bytes.length; } + /** + * Returns a finder class that can be used to find keys in the hash in a thread-safe manner + */ + public Finder newFinder() { + return new Finder(); + } + + public class Finder { + private final BytesRef intermediate = new BytesRef(); + + public long find(BytesRef key) { + return threadSafeFind(key, intermediate); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractIpScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractIpScriptFieldQuery.java index 52b81a3e613c0..2d4df68cd83de 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractIpScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractIpScriptFieldQuery.java @@ -36,7 +36,7 @@ protected final boolean matches(IpFieldScript scriptContext, int docId) { /** * Does the value match this query?
*/ - protected abstract boolean matches(BytesRef[] values, int conut); + protected abstract boolean matches(BytesRef[] values, int count); protected static InetAddress decode(BytesRef ref) { return InetAddressPoint.decode(BytesReference.toBytes(new BytesArray(ref))); diff --git a/server/src/main/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQuery.java index b1f5d99b1ca4d..06bce89cea020 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQuery.java @@ -8,6 +8,8 @@ package org.elasticsearch.search.runtime; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.util.BytesRefHash; @@ -24,16 +26,42 @@ public IpScriptFieldTermsQuery(Script script, IpFieldScript.LeafFactory leafFact this.terms = terms; } - @Override - protected boolean matches(BytesRef[] values, int count) { + boolean matches(BytesRef[] values, int count, BytesRefHash.Finder finder) { for (int i = 0; i < count; i++) { - if (terms.find(values[i]) >= 0) { + if (finder.find(values[i]) >= 0) { return true; } } return false; } + @Override + protected final boolean matches(BytesRef[] values, int count) { + throw new UnsupportedOperationException("This leads to non-thread safe usage of BytesRefHash; use createTwoPhaseIterator instead"); + } + + boolean matches(IpFieldScript scriptContext, int docId, BytesRefHash.Finder finder) { + scriptContext.runForDoc(docId); + return matches(scriptContext.values(), scriptContext.count(), finder); + } + + protected final TwoPhaseIterator createTwoPhaseIterator(IpFieldScript scriptContext, DocIdSetIterator approximation) { + return new TwoPhaseIterator(approximation) { + private final BytesRefHash.Finder finder = terms.newFinder(); + + @Override + public boolean matches() { + // We need to use a thread safe finder, as this can be called from multiple threads + return IpScriptFieldTermsQuery.this.matches(scriptContext, approximation.docID(), finder); + } + + @Override + public float matchCost() { + return MATCH_COST; + } + }; + } + @Override public final String toString(String field) { StringBuilder b = new StringBuilder(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index bf16456c7476e..4726424ada5f2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -228,7 +228,6 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) } @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106900") public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); diff --git a/server/src/test/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQueryTests.java index 369672d6aabd1..3ba704411b5a0 100644 --- 
a/server/src/test/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/IpScriptFieldTermsQueryTests.java @@ -15,19 +15,34 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.script.Script; +import java.net.InetAddress; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.Mockito.mock; -public class IpScriptFieldTermsQueryTests extends AbstractIpScriptFieldQueryTestCase { +public class IpScriptFieldTermsQueryTests extends AbstractScriptFieldQueryTestCase { @Override protected IpScriptFieldTermsQuery createTestInstance() { return createTestInstance(between(1, 100)); } + protected final IpFieldScript.LeafFactory leafFactory = mock(IpFieldScript.LeafFactory.class); + + @Override + public final void testVisit() { + assertEmptyVisit(); + } + + protected static BytesRef encode(InetAddress addr) { + return new BytesRef(InetAddressPoint.encode(addr)); + } + private IpScriptFieldTermsQuery createTestInstance(int size) { BytesRefHash terms = new BytesRefHash(size, BigArrays.NON_RECYCLING_INSTANCE); while (terms.size() < size) { @@ -80,12 +95,13 @@ public void testMatches() { terms.add(ip1); terms.add(ip2); IpScriptFieldTermsQuery query = new IpScriptFieldTermsQuery(randomScript(), leafFactory, "test", terms); - assertTrue(query.matches(new BytesRef[] { ip1 }, 1)); - assertTrue(query.matches(new BytesRef[] { ip2 }, 1)); - assertTrue(query.matches(new BytesRef[] { ip1, notIp }, 2)); - assertTrue(query.matches(new BytesRef[] { notIp, ip1 }, 2)); - assertFalse(query.matches(new BytesRef[] { notIp }, 1)); - assertFalse(query.matches(new BytesRef[] { notIp, ip1 }, 1)); + BytesRefHash.Finder finder = terms.newFinder(); + assertTrue(query.matches(new BytesRef[] { ip1 }, 1, finder)); + assertTrue(query.matches(new BytesRef[] { ip2 }, 1, finder)); + assertTrue(query.matches(new BytesRef[] { ip1, notIp }, 2, finder)); + assertTrue(query.matches(new BytesRef[] { notIp, ip1 }, 2, finder)); + assertFalse(query.matches(new BytesRef[] { notIp }, 1, finder)); + assertFalse(query.matches(new BytesRef[] { notIp, ip1 }, 1, finder)); } @Override From ac5f5640e54f1188d26cabc5d291f81b145d4d4d Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Tue, 16 Apr 2024 14:55:55 -0400 Subject: [PATCH 063/130] [DOCS] Update tutorial to discourage editing managed ILM policies (#107074) --- .../example-index-lifecycle-policy.asciidoc | 70 +++++++++++++------ 1 file changed, 49 insertions(+), 21 deletions(-) diff --git a/docs/reference/ilm/example-index-lifecycle-policy.asciidoc b/docs/reference/ilm/example-index-lifecycle-policy.asciidoc index 91a5283a9b635..6ec261fabc448 100644 --- a/docs/reference/ilm/example-index-lifecycle-policy.asciidoc +++ b/docs/reference/ilm/example-index-lifecycle-policy.asciidoc @@ -1,5 +1,3 @@ -[role="xpack"] - [[example-using-index-lifecycle-policy]] === Tutorial: Customize built-in {ilm-init} policies @@ -9,9 +7,9 @@ {es} includes the following built-in {ilm-init} policies: -- `logs` -- `metrics` -- `synthetics` +- `logs@lifecycle` +- `metrics@lifecycle` +- `synthetics@lifecycle` {agent} uses these policies to manage backing indices for its data 
streams. This tutorial shows you how to use {kib}’s **Index Lifecycle Policies** to @@ -67,20 +65,25 @@ node.roles: [ data_warm ] cluster. [discrete] -[[example-using-index-lifecycle-policy-view-ilm-policy]] -==== View the policy +[[example-using-index-lifecycle-policy-duplicate-ilm-policy]] +==== Duplicate the policy {agent} uses data streams with an index pattern of `logs-*-*` to store log -monitoring data. The built-in `logs` {ilm-init} policy automatically manages -backing indices for these data streams. +monitoring data. The managed `logs@lifecycle` {ilm-init} policy automatically manages +backing indices for these data streams. + +If you don't want to use the policy defaults, then you can customize the managed policy and then save it as a new policy. You can then use the new policy in related component templates and index templates. + +CAUTION: You should never edit managed policies directly. Changes to managed policies might be rolled back or overwritten. -To view the `logs` policy in {kib}: +To save the `logs@lifecycle` policy as a new policy in {kib}: -. Open the menu and go to **Stack Management > Index Lifecycle Policies**. -. Select **Include managed system policies**. -. Select the `logs` policy. +. Open the menu and go to **Stack Management** > **Index Lifecycle Policies**. +. Toggle **Include managed system policies**. +. Select the `logs@lifecycle` policy. +. On the **Edit policy logs** page, toggle **Save as new policy**, and then provide a new name for the policy, for example, `logs-custom`. -The `logs` policy uses the recommended rollover defaults: Start writing to a new +The `logs@lifecycle` policy uses the recommended rollover defaults: Start writing to a new index when the current write index reaches 50GB or becomes 30 days old. To view or change the rollover settings, click **Advanced settings** for the hot @@ -90,16 +93,12 @@ settings. [role="screenshot"] image::images/ilm/tutorial-ilm-hotphaserollover-default.png[View rollover defaults] -Note that {kib} displays a warning that editing a managed policy can break -Kibana. For this tutorial, you can ignore that warning and proceed with -modifying the policy. - [discrete] [[ilm-ex-modify-policy]] ==== Modify the policy -The default `logs` policy is designed to prevent the creation of many tiny daily -indices. You can modify the policy to meet your performance requirements and +The default `logs@lifecycle` policy is designed to prevent the creation of many tiny daily +indices. You can modify your copy of the policy to meet your performance requirements and manage resource usage. . Activate the warm phase and click **Advanced settings**. @@ -127,4 +126,33 @@ deletes indices 90 days after rollover. [role="screenshot"] image::images/ilm/tutorial-ilm-delete-rollover.png[Add a delete phase] -. Click **Save Policy**. +. Click **Save as new policy**. + +TIP: Copies of managed {ilm-init} policies are also marked as **Managed**. You can use the <> to update the `_meta.managed` parameter to `false`. + +[discrete] +[[example-using-index-lifecycle-policy-apply-policy]] +==== Apply the policy + +To apply your new {ilm-init} policy to the `logs` index template, create or edit the `logs@custom` component template. + +A `*@custom` component template allows you to customize the mappings and settings of managed index templates, without having to override managed index templates or component templates. This type of component template is automatically picked up by the index template. <>. + +. 
Click on the **Component Template** tab and click **Create component template**. +. Under **Logistics**, name the component template `logs@custom`. +. Under **Index settings**, set the {ilm-init} policy name created in the previous step: ++ +[source,JSON] +-------------------------------------------------- +{ + "index": { + "lifecycle": { + "name": "logs-custom" + } + } +} +-------------------------------------------------- ++ +. Continue to **Review**, and then click **Save component template**. +. Click the **Index Templates** tab, and then select the `logs` index template. +. In the summary, view the **Component templates** list. `logs@custom` should be listed. \ No newline at end of file From 5a2dc6dc00e18176f01e59fbad38ffee7b39274a Mon Sep 17 00:00:00 2001 From: William Brafford Date: Tue, 16 Apr 2024 17:11:45 -0400 Subject: [PATCH 064/130] Fix system index thread pool tests (#107443) * Allow rejected executions when filling up a thread pool queue * Move test to integration tests * Avoid setting maxConcurrentShardRequests to 1 * Test all index descriptors defined in the Kibana plugin --- modules/kibana/build.gradle | 1 + .../kibana/KibanaThreadPoolIT.java | 64 +++++++++++++++++++ .../kibana/KibanaThreadPoolTests.java | 52 --------------- ...ava => SystemIndexThreadPoolTestCase.java} | 20 ++++-- 4 files changed, 80 insertions(+), 57 deletions(-) create mode 100644 modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java delete mode 100644 modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java rename test/framework/src/main/java/org/elasticsearch/indices/{SystemIndexThreadPoolTests.java => SystemIndexThreadPoolTestCase.java} (81%) diff --git a/modules/kibana/build.gradle b/modules/kibana/build.gradle index b38b446c67fd9..a4ff51e99069c 100644 --- a/modules/kibana/build.gradle +++ b/modules/kibana/build.gradle @@ -5,6 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ +apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { diff --git a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java new file mode 100644 index 0000000000000..317bfa9edd1c9 --- /dev/null +++ b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.kibana; + +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.SystemIndexThreadPoolTestCase; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class KibanaThreadPoolIT extends SystemIndexThreadPoolTestCase { + + @Override + protected Collection> nodePlugins() { + return Set.of(KibanaPlugin.class); + } + + public void testKibanaThreadPool() { + List kibanaSystemIndices = Stream.of( + KibanaPlugin.KIBANA_INDEX_DESCRIPTOR.getIndexPattern(), + KibanaPlugin.REPORTING_INDEX_DESCRIPTOR.getIndexPattern(), + KibanaPlugin.APM_AGENT_CONFIG_INDEX_DESCRIPTOR.getIndexPattern(), + KibanaPlugin.APM_CUSTOM_LINK_INDEX_DESCRIPTOR.getIndexPattern() + ).map(s -> s.replace("*", randomAlphaOfLength(8).toLowerCase(Locale.ROOT))).toList(); + + runWithBlockedThreadPools(() -> { + for (String index : kibanaSystemIndices) { + // index documents + String idToDelete = client().prepareIndex(index).setSource(Map.of("foo", "delete me!")).get().getId(); + String idToUpdate = client().prepareIndex(index).setSource(Map.of("foo", "update me!")).get().getId(); + + // bulk index, delete, and update + Client bulkClient = client(); + BulkResponse response = bulkClient.prepareBulk(index) + .add(bulkClient.prepareIndex(index).setSource(Map.of("foo", "search me!"))) + .add(bulkClient.prepareDelete(index, idToDelete)) + .add(bulkClient.prepareUpdate().setId(idToUpdate).setDoc(Map.of("foo", "I'm updated!"))) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + assertNoFailures(response); + + // match-all search + assertHitCount(client().prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), 2); + } + }); + } +} diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java deleted file mode 100644 index 0974fd6d36b18..0000000000000 --- a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.kibana; - -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.indices.SystemIndexThreadPoolTests; -import org.elasticsearch.plugins.Plugin; - -import java.util.Collection; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; - -public class KibanaThreadPoolTests extends SystemIndexThreadPoolTests { - - @Override - protected Collection> nodePlugins() { - return Set.of(KibanaPlugin.class); - } - - public void testKibanaThreadPool() { - runWithBlockedThreadPools(() -> { - // index documents - String idToDelete = client().prepareIndex(".kibana").setSource(Map.of("foo", "delete me!")).get().getId(); - String idToUpdate = client().prepareIndex(".kibana").setSource(Map.of("foo", "update me!")).get().getId(); - - // bulk index, delete, and update - Client bulkClient = client(); - BulkResponse response = bulkClient.prepareBulk(".kibana") - .add(bulkClient.prepareIndex(".kibana").setSource(Map.of("foo", "search me!"))) - .add(bulkClient.prepareDelete(".kibana", idToDelete)) - .add(bulkClient.prepareUpdate().setId(idToUpdate).setDoc(Map.of("foo", "I'm updated!"))) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .get(); - assertNoFailures(response); - - // match-all search - assertHitCount(client().prepareSearch(".kibana").setQuery(QueryBuilders.matchAllQuery()), 2); - }); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java similarity index 81% rename from test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java rename to test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java index 7db5d10c5fcfa..b33320539b57f 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java @@ -9,6 +9,7 @@ package org.elasticsearch.indices; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -19,6 +20,7 @@ import java.util.concurrent.Phaser; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.startsWith; /** @@ -31,7 +33,7 @@ *

 *     When implementing this class, don't forget to override {@link ESIntegTestCase#nodePlugins()} if
 *     the relevant system index is defined in a plugin.

    */ -public abstract class SystemIndexThreadPoolTests extends ESIntegTestCase { +public abstract class SystemIndexThreadPoolTestCase extends ESIntegTestCase { private static final String USER_INDEX = "user_index"; @@ -67,7 +69,6 @@ protected void runWithBlockedThreadPools(Runnable runnable) { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testUserThreadPoolsAreBlocked() { assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); @@ -88,9 +89,13 @@ private void assertThreadPoolsBlocked() { assertThat(e2.getMessage(), startsWith("rejected execution of ActionRunnable")); var e3 = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch(USER_INDEX).setQuery(QueryBuilders.matchAllQuery()).get() + () -> client().prepareSearch(USER_INDEX) + .setQuery(QueryBuilders.matchAllQuery()) + // Request times out if max concurrent shard requests is set to 1 + .setMaxConcurrentShardRequests(usually() ? SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10)) + .get() ); - assertThat(e3.getMessage(), startsWith("all shards failed")); + assertThat(e3.getMessage(), containsString("all shards failed")); } private void fillThreadPoolQueues() { @@ -101,7 +106,12 @@ private void fillThreadPoolQueues() { // fill up the queue for (int i = 0; i < info.getQueueSize().singles(); i++) { - threadPool.executor(threadPoolName).submit(() -> {}); + try { + threadPool.executor(threadPoolName).submit(() -> {}); + } catch (EsRejectedExecutionException e) { + // we can't be sure that some other task won't get queued in a test cluster + // but we should put all the tasks in there anyway + } } } } From f1f271f6336ff3c692ff44639e246f0d3a03b209 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 16 Apr 2024 14:56:15 -0700 Subject: [PATCH 065/130] [TEST] Unmute TextFieldMapperTests#testBlockLoaderParentFromRowStrideReader (#107548) Randomly stumbled upon this - #104158, which is referenced in the `@AwaitsFix` annotation, is closed, so this can be unmuted.
--- .../org/elasticsearch/index/mapper/TextFieldMapperTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 1c5ae3baca827..5d0c1c01ecdcf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -1454,7 +1454,6 @@ public void testBlockLoaderFromParentColumnReader() throws IOException { testBlockLoaderFromParent(true, randomBoolean()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104158") public void testBlockLoaderParentFromRowStrideReader() throws IOException { testBlockLoaderFromParent(false, randomBoolean()); } From f5c7938ab8043383aebeac0998a40967da55f924 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 16 Apr 2024 16:57:14 -0500 Subject: [PATCH 066/130] Adding cache_stats to geoip stats API (#107334) --- docs/changelog/107334.yaml | 5 + .../ingest/apis/geoip-stats-api.asciidoc | 37 +++++++- .../ingest/geoip/DatabaseNodeService.java | 5 + .../ingest/geoip/GeoIpCache.java | 38 +++++++- .../ingest/geoip/stats/CacheStats.java | 41 +++++++++ .../ingest/geoip/stats/GeoIpStatsAction.java | 26 +++++- .../stats/GeoIpStatsTransportAction.java | 2 + .../ingest/geoip/GeoIpCacheTests.java | 33 +++++++ .../stats/CacheStatsSerializingTests.java | 92 +++++++++++++++++++ ...atsActionNodeResponseSerializingTests.java | 1 + .../GeoIpStatsActionNodeResponseTests.java | 1 + .../test/ingest_geoip/30_geoip_stats.yml | 8 ++ .../org/elasticsearch/TransportVersions.java | 1 + 13 files changed, 287 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/107334.yaml create mode 100644 modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/CacheStats.java create mode 100644 modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/CacheStatsSerializingTests.java diff --git a/docs/changelog/107334.yaml b/docs/changelog/107334.yaml new file mode 100644 index 0000000000000..d1e8df2fa9c40 --- /dev/null +++ b/docs/changelog/107334.yaml @@ -0,0 +1,5 @@ +pr: 107334 +summary: Adding `cache_stats` to geoip stats API +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 84a2b00737e5a..4917441c0020b 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -24,7 +24,7 @@ GET _ingest/geoip/stats `manage` <> to use this API. * If <> is -disabled, this API returns zero values and an empty `nodes` object. +disabled and no custom databases are configured, this API returns zero values and an empty `nodes` object. [role="child_attributes"] [[geoip-stats-api-response-body]] @@ -83,6 +83,41 @@ Downloaded databases for the node. (string) Name of the database. ====== +`cache_stats`:: +(object) +GeoIP cache stats for the node. ++ +.Properties of `cache_stats` +[%collapsible%open] +====== +`count`:: +(Long) +Number of cached entries. + +`hits`:: +(Long) +The number of geoIP lookups served from cache. + +`misses`:: +(Long) +The number of times geoIP lookups couldn't be +served from cache. + +`evictions`:: +(Long) +The number of cache entries evicted from the cache. + +`hits_time_in_millis`:: +(Long) +The amount of time in milliseconds spent fetching data from the cache on successful cache hits only.
+ +`misses_time_in_millis`:: +(Long) +The amount of time in milliseconds spent fetching data from the cache and the backing GeoIP2 database and updating the +cache, on cache misses only. + +====== + `files_in_temp`:: (array of strings) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index 540f463be469c..266d40f2f9d56 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.ingest.geoip.stats.CacheStats; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.search.SearchHit; import org.elasticsearch.watcher.ResourceWatcherService; @@ -506,4 +507,8 @@ public Set getFilesInTemp() { throw new UncheckedIOException(e); } } + + public CacheStats getCacheStats() { + return cache.getCacheStats(); + } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java index ff75325624412..fc70e2fbb594a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java @@ -12,10 +12,14 @@ import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.ingest.geoip.stats.CacheStats; import java.net.InetAddress; import java.nio.file.Path; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; +import java.util.function.LongSupplier; /** * The in-memory cache for the geoip data. There should only be 1 instance of this class. @@ -38,16 +42,24 @@ public String toString() { } }; + private final LongSupplier relativeNanoTimeProvider; private final Cache cache; + private final AtomicLong hitsTimeInNanos = new AtomicLong(0); + private final AtomicLong missesTimeInNanos = new AtomicLong(0); // package private for testing - GeoIpCache(long maxSize) { + GeoIpCache(long maxSize, LongSupplier relativeNanoTimeProvider) { if (maxSize < 0) { throw new IllegalArgumentException("geoip max cache size must be 0 or greater"); } + this.relativeNanoTimeProvider = relativeNanoTimeProvider; this.cache = CacheBuilder.builder().setMaximumWeight(maxSize).build(); } + GeoIpCache(long maxSize) { + this(maxSize, System::nanoTime); + } + @SuppressWarnings("unchecked") T putIfAbsent( InetAddress ip, @@ -56,11 +68,14 @@ T putIfAbsent( ) { // can't use cache.computeIfAbsent due to the elevated permissions for the jackson (run via the cache loader) CacheKey cacheKey = new CacheKey(ip, databasePath); + long cacheStart = relativeNanoTimeProvider.getAsLong(); // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
AbstractResponse response = cache.get(cacheKey); + long cacheRequestTime = relativeNanoTimeProvider.getAsLong() - cacheStart; // populate the cache for this key, if necessary if (response == null) { + long retrieveStart = relativeNanoTimeProvider.getAsLong(); response = retrieveFunction.apply(ip); // if the response from the database was null, then use the no-result sentinel value if (response == null) { @@ -68,6 +83,10 @@ T putIfAbsent( } // store the result or no-result in the cache cache.put(cacheKey, response); + long databaseRequestAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; + missesTimeInNanos.addAndGet(cacheRequestTime + databaseRequestAndCachePutTime); + } else { + hitsTimeInNanos.addAndGet(cacheRequestTime); } if (response == NO_RESULT) { @@ -99,6 +118,23 @@ public int count() { return cache.count(); } + /** + * Returns stats about this cache as of this moment. There is no guarantee that the counts reconcile (for example hits + misses = count) + * because no locking is performed when requesting these stats. + * @return Current stats about this cache + */ + public CacheStats getCacheStats() { + Cache.CacheStats stats = cache.stats(); + return new CacheStats( + cache.count(), + stats.getHits(), + stats.getMisses(), + stats.getEvictions(), + TimeValue.nsecToMSec(hitsTimeInNanos.get()), + TimeValue.nsecToMSec(missesTimeInNanos.get()) + ); + } + /** * The key to use for the cache. Since this cache can span multiple geoip processors that all use different databases, the database * path is needed to be included in the cache key. For example, if we only used the IP address as the key the City and ASN the same diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/CacheStats.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/CacheStats.java new file mode 100644 index 0000000000000..16a234e37e99e --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/CacheStats.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.ingest.geoip.stats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +public record CacheStats(long count, long hits, long misses, long evictions, long hitsTimeInMillis, long missesTimeInMillis) + implements + Writeable { + + public CacheStats(StreamInput streamInput) throws IOException { + this( + streamInput.readLong(), + streamInput.readLong(), + streamInput.readLong(), + streamInput.readLong(), + streamInput.readLong(), + streamInput.readLong() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(count); + out.writeLong(hits); + out.writeLong(misses); + out.writeLong(evictions); + out.writeLong(hitsTimeInMillis); + out.writeLong(missesTimeInMillis); + } +} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java index db1242888ca82..e2aec479a6742 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java @@ -130,6 +130,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (response.configDatabases.isEmpty() == false) { builder.array("config_databases", response.configDatabases.toArray(String[]::new)); } + builder.startObject("cache_stats"); + CacheStats cacheStats = response.cacheStats; + builder.field("count", cacheStats.count()); + builder.field("hits", cacheStats.hits()); + builder.field("misses", cacheStats.misses()); + builder.field("evictions", cacheStats.evictions()); + builder.field("hits_time_in_millis", cacheStats.hitsTimeInMillis()); + builder.field("misses_time_in_millis", cacheStats.missesTimeInMillis()); + builder.endObject(); builder.endObject(); } builder.endObject(); @@ -154,6 +163,7 @@ public int hashCode() { public static class NodeResponse extends BaseNodeResponse { private final GeoIpDownloaderStats downloaderStats; + private final CacheStats cacheStats; private final Set databases; private final Set filesInTemp; private final Set configDatabases; @@ -161,6 +171,11 @@ public static class NodeResponse extends BaseNodeResponse { protected NodeResponse(StreamInput in) throws IOException { super(in); downloaderStats = in.readBoolean() ? new GeoIpDownloaderStats(in) : null; + if (in.getTransportVersion().onOrAfter(TransportVersions.GEOIP_CACHE_STATS)) { + cacheStats = in.readBoolean() ? 
new CacheStats(in) : null; + } else { + cacheStats = null; + } databases = in.readCollectionAsImmutableSet(StreamInput::readString); filesInTemp = in.readCollectionAsImmutableSet(StreamInput::readString); configDatabases = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) @@ -171,12 +186,14 @@ protected NodeResponse(StreamInput in) throws IOException { protected NodeResponse( DiscoveryNode node, GeoIpDownloaderStats downloaderStats, + CacheStats cacheStats, Set databases, Set filesInTemp, Set configDatabases ) { super(node); this.downloaderStats = downloaderStats; + this.cacheStats = cacheStats; this.databases = Set.copyOf(databases); this.filesInTemp = Set.copyOf(filesInTemp); this.configDatabases = Set.copyOf(configDatabases); @@ -205,6 +222,12 @@ public void writeTo(StreamOutput out) throws IOException { if (downloaderStats != null) { downloaderStats.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.GEOIP_CACHE_STATS)) { + out.writeBoolean(cacheStats != null); + if (cacheStats != null) { + cacheStats.writeTo(out); + } + } out.writeStringCollection(databases); out.writeStringCollection(filesInTemp); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)) { @@ -218,6 +241,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; NodeResponse that = (NodeResponse) o; return downloaderStats.equals(that.downloaderStats) + && Objects.equals(cacheStats, that.cacheStats) && databases.equals(that.databases) && filesInTemp.equals(that.filesInTemp) && Objects.equals(configDatabases, that.configDatabases); @@ -225,7 +249,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(downloaderStats, databases, filesInTemp, configDatabases); + return Objects.hash(downloaderStats, cacheStats, databases, filesInTemp, configDatabases); } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java index 13f9544e1b9e4..1e1778a81c4a8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java @@ -76,9 +76,11 @@ protected NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throw protected NodeResponse nodeOperation(NodeRequest request, Task task) { GeoIpDownloader geoIpTask = geoIpDownloaderTaskExecutor.getCurrentTask(); GeoIpDownloaderStats downloaderStats = geoIpTask == null || geoIpTask.getStatus() == null ? 
null : geoIpTask.getStatus(); + CacheStats cacheStats = registry.getCacheStats(); return new NodeResponse( transportService.getLocalNode(), downloaderStats, + cacheStats, registry.getAvailableDatabases(), registry.getFilesInTemp(), registry.getConfigDatabases() diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java index d049ca3f9bcd0..03ab51d510176 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java @@ -11,12 +11,16 @@ import com.maxmind.geoip2.model.AbstractResponse; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.ingest.geoip.stats.CacheStats; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public class GeoIpCacheTests extends ESTestCase { @@ -83,4 +87,33 @@ public void testInvalidInit() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new GeoIpCache(-1)); assertEquals("geoip max cache size must be 0 or greater", ex.getMessage()); } + + public void testGetCacheStats() { + final long maxCacheSize = 2; + final AtomicLong testNanoTime = new AtomicLong(0); + // We use a relative time provider that increments 1ms every time it is called. So each operation appears to take 1ms + GeoIpCache cache = new GeoIpCache(maxCacheSize, () -> testNanoTime.addAndGet(TimeValue.timeValueMillis(1).getNanos())); + AbstractResponse response = mock(AbstractResponse.class); + String databasePath = "path/to/db1"; + InetAddress key1 = InetAddresses.forString("127.0.0.1"); + InetAddress key2 = InetAddresses.forString("127.0.0.2"); + InetAddress key3 = InetAddresses.forString("127.0.0.3"); + + cache.putIfAbsent(key1, databasePath, ip -> response); // cache miss + cache.putIfAbsent(key2, databasePath, ip -> response); // cache miss + cache.putIfAbsent(key1, databasePath, ip -> response); // cache hit + cache.putIfAbsent(key1, databasePath, ip -> response); // cache hit + cache.putIfAbsent(key1, databasePath, ip -> response); // cache hit + cache.putIfAbsent(key3, databasePath, ip -> response); // cache miss, key2 will be evicted + cache.putIfAbsent(key2, databasePath, ip -> response); // cache miss, key1 will be evicted + CacheStats cacheStats = cache.getCacheStats(); + assertThat(cacheStats.count(), equalTo(maxCacheSize)); + assertThat(cacheStats.hits(), equalTo(3L)); + assertThat(cacheStats.misses(), equalTo(4L)); + assertThat(cacheStats.evictions(), equalTo(2L)); + // There are 3 hits, each taking 1ms: + assertThat(cacheStats.hitsTimeInMillis(), equalTo(3L)); + // There are 4 misses. 
Each is made up of a cache query, and a database query, each being 1ms: + assertThat(cacheStats.missesTimeInMillis(), equalTo(8L)); + } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/CacheStatsSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/CacheStatsSerializingTests.java new file mode 100644 index 0000000000000..fa6b0c366d2d9 --- /dev/null +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/CacheStatsSerializingTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.ingest.geoip.stats; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class CacheStatsSerializingTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return CacheStats::new; + } + + @Override + protected CacheStats createTestInstance() { + return createRandomInstance(); + } + + @Override + protected CacheStats mutateInstance(CacheStats instance) throws IOException { + long count = instance.count(); + long hits = instance.hits(); + long misses = instance.misses(); + long evictions = instance.evictions(); + long hitsTimeInMillis = instance.hitsTimeInMillis(); + long missesTimeInMillis = instance.missesTimeInMillis(); + return switch (between(0, 5)) { + case 0 -> new CacheStats( + randomValueOtherThan(count, ESTestCase::randomLong), + hits, + misses, + evictions, + hitsTimeInMillis, + missesTimeInMillis + ); + case 1 -> new CacheStats( + count, + randomValueOtherThan(hits, ESTestCase::randomLong), + misses, + evictions, + hitsTimeInMillis, + missesTimeInMillis + ); + case 2 -> new CacheStats( + count, + hits, + randomValueOtherThan(misses, ESTestCase::randomLong), + evictions, + hitsTimeInMillis, + missesTimeInMillis + ); + case 3 -> new CacheStats( + count, + hits, + misses, + randomValueOtherThan(evictions, ESTestCase::randomLong), + hitsTimeInMillis, + missesTimeInMillis + ); + case 4 -> new CacheStats( + count, + hits, + misses, + evictions, + randomValueOtherThan(hitsTimeInMillis, ESTestCase::randomLong), + missesTimeInMillis + ); + case 5 -> new CacheStats( + count, + hits, + misses, + evictions, + hitsTimeInMillis, + randomValueOtherThan(missesTimeInMillis, ESTestCase::randomLong) + ); + default -> throw new IllegalStateException("Unexpected value"); + }; + } + + static CacheStats createRandomInstance() { + return new CacheStats(randomLong(), randomLong(), randomLong(), randomLong(), randomLong(), randomLong()); + } +} diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java index 1008dcf56c4f1..6ff2e589270e1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java +++ 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseSerializingTests.java @@ -40,6 +40,7 @@ static GeoIpStatsAction.NodeResponse createRandomInstance() { return new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), + randomBoolean() ? null : CacheStatsSerializingTests.createRandomInstance(), databases, files, configDatabases diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java index 27a332c3b42f9..919f82a4cbc1f 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsActionNodeResponseTests.java @@ -28,6 +28,7 @@ public void testInputsAreDefensivelyCopied() { GeoIpStatsAction.NodeResponse nodeResponse = new GeoIpStatsAction.NodeResponse( node, GeoIpDownloaderStatsSerializingTests.createRandomInstance(), + randomBoolean() ? null : CacheStatsSerializingTests.createRandomInstance(), databases, files, configDatabases diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/30_geoip_stats.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/30_geoip_stats.yml index 852b2047a47ec..3d28802aeaf68 100644 --- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/30_geoip_stats.yml +++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/30_geoip_stats.yml @@ -8,3 +8,11 @@ - gte: { stats.databases_count: 0 } - gte: { stats.total_download_time: 0 } - is_true: nodes + - set: + nodes._arbitrary_key_: node_id + - gte: { nodes.$node_id.cache_stats.count: 0 } + - gte: { nodes.$node_id.cache_stats.hits: 0 } + - gte: { nodes.$node_id.cache_stats.misses: 0 } + - gte: { nodes.$node_id.cache_stats.evictions: 0 } + - gte: { nodes.$node_id.cache_stats.hits_time_in_millis: 0 } + - gte: { nodes.$node_id.cache_stats.misses_time_in_millis: 0 } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 72404e405edd1..5c5a536034508 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -174,6 +174,7 @@ static TransportVersion def(int id) { public static final TransportVersion TRACK_FLUSH_TIME_EXCLUDING_WAITING_ON_LOCKS = def(8_633_00_0); public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_EMBEDDINGS = def(8_634_00_0); public static final TransportVersion ILM_SHRINK_ENABLE_WRITE = def(8_635_00_0); + public static final TransportVersion GEOIP_CACHE_STATS = def(8_636_00_0); /* * STOP! READ THIS FIRST! No, really, From 84f1b2695327a418d19622019e1f3b55048b375e Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 17 Apr 2024 09:54:56 +0200 Subject: [PATCH 067/130] Update esql-version.asciidoc (#107547) Fix heading misalignment and typo. 
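A note on the timing arithmetic in `testGetCacheStats` above: the test constructs the cache with an injected relative-time supplier rather than reading the real clock, so the measured durations are fully deterministic. Here is a minimal standalone sketch of that fake-clock technique, assuming only the `TimeValue` helper the test already imports; the variable names are illustrative, not taken from the patch:

```java
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongSupplier;

import org.elasticsearch.core.TimeValue;

// Every call advances the fake clock by exactly 1ms, so code that reads the
// clock before and after a step always measures that step as 1ms, no matter
// how long it really took.
AtomicLong fakeClock = new AtomicLong();
LongSupplier relativeNanoTime = () -> fakeClock.addAndGet(TimeValue.timeValueMillis(1).getNanos());

long before = relativeNanoTime.getAsLong();
// ... one timed step, e.g. a cache lookup or a database load ...
long elapsedMillis = TimeValue.timeValueNanos(relativeNanoTime.getAsLong() - before).millis(); // always 1
```

With this clock the assertions line up exactly: each of the 3 hits measures a single 1ms cache query (3ms in total), while each of the 4 misses measures a cache query plus a database query (2ms each, 8ms in total).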
--- docs/reference/esql/esql-version.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/esql/esql-version.asciidoc b/docs/reference/esql/esql-version.asciidoc index 16bf1f66e3166..daeb796ecc5b1 100644 --- a/docs/reference/esql/esql-version.asciidoc +++ b/docs/reference/esql/esql-version.asciidoc @@ -13,7 +13,7 @@ [discrete] [[esql-versions-explanation]] -=== How versions work +==== How versions work {esql} language versions are independent of {es} versions. Versioning the language ensures that your queries will always @@ -45,5 +45,5 @@ POST /_query?format=txt We won't make breaking changes to released {esql} versions and versions will remain supported until they are deprecated. New features, bug fixes, and performance improvements -will be continue to be added to released {esql} versions, +will continue to be added to released {esql} versions, provided they do not involve breaking changes. From 1d0c470de0a93e8667a7e02780faf24b1c4d5a79 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 17 Apr 2024 10:55:32 +0200 Subject: [PATCH 068/130] Stop using ReleasableLock in o.e.c.cache.Cache to save O(10M) in heap (#107555) I have a couple heap dumps that show the lock wrapper alone wastes O(10M) in heap for these things. Also, I suspect the indirection does cost non-trivial performance here in some cases. => let's spend a couple more lines of code to save that overhead --- .../org/elasticsearch/common/cache/Cache.java | 86 ++++++++++++++----- 1 file changed, 64 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/cache/Cache.java b/server/src/main/java/org/elasticsearch/common/cache/Cache.java index 98a4b90da73d5..e380375a29de5 100644 --- a/server/src/main/java/org/elasticsearch/common/cache/Cache.java +++ b/server/src/main/java/org/elasticsearch/common/cache/Cache.java @@ -8,7 +8,6 @@ package org.elasticsearch.common.cache; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.core.Tuple; import java.lang.reflect.Array; @@ -19,6 +18,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.LongAdder; +import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -178,10 +178,10 @@ private static final class Entry { */ private final class CacheSegment { // read/write lock protecting mutations to the segment
null : map.get(key); + } finally { + readLock.unlock(); } if (future != null) { Entry entry; @@ -213,8 +216,11 @@ Entry get(K key, long now, boolean eagerEvict) { if (isExpired(entry, now)) { misses.increment(); if (eagerEvict) { - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { evictEntry(entry); + } finally { + lruLock.unlock(); } } return null; @@ -240,7 +246,8 @@ Entry get(K key, long now, boolean eagerEvict) { Tuple, Entry> put(K key, V value, long now) { Entry entry = new Entry<>(key, value, now); Entry existing = null; - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { try { if (map == null) { map = new HashMap<>(); @@ -252,6 +259,8 @@ Tuple, Entry> put(K key, V value, long now) { } catch (ExecutionException | InterruptedException e) { throw new IllegalStateException(e); } + } finally { + writeLock.unlock(); } return Tuple.tuple(entry, existing); } @@ -263,7 +272,8 @@ Tuple, Entry> put(K key, V value, long now) { */ void remove(K key) { CompletableFuture> future; - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { if (map == null) { future = null; } else { @@ -272,6 +282,8 @@ void remove(K key) { map = null; } } + } finally { + writeLock.unlock(); } if (future != null) { evictions.increment(); @@ -290,7 +302,8 @@ void remove(K key) { void remove(K key, V value, boolean notify) { CompletableFuture> future; boolean removed = false; - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { future = map == null ? null : map.get(key); try { if (future != null) { @@ -307,6 +320,8 @@ void remove(K key, V value, boolean notify) { } catch (ExecutionException | InterruptedException e) { throw new IllegalStateException(e); } + } finally { + writeLock.unlock(); } if (future != null && removed) { @@ -333,7 +348,7 @@ void remove(K key, V value, boolean notify) { Entry tail; // lock protecting mutations to the LRU list - private final ReleasableLock lruLock = new ReleasableLock(new ReentrantLock()); + private final ReentrantLock lruLock = new ReentrantLock(); /** * Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key. @@ -380,15 +395,18 @@ public V computeIfAbsent(K key, CacheLoader loader) throws ExecutionExcept // need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding // the segment lock; to do this, we atomically put a future in the map that can load the value, and then // get the value from this future on the thread that won the race to place the future into the segment map - CacheSegment segment = getCacheSegment(key); + final CacheSegment segment = getCacheSegment(key); CompletableFuture> future; CompletableFuture> completableFuture = new CompletableFuture<>(); - try (ReleasableLock ignored = segment.writeLock.acquire()) { + segment.writeLock.lock(); + try { if (segment.map == null) { segment.map = new HashMap<>(); } future = segment.map.putIfAbsent(key, completableFuture); + } finally { + segment.writeLock.unlock(); } BiFunction, Throwable, ? extends V> handler = (ok, ex) -> { @@ -396,7 +414,8 @@ public V computeIfAbsent(K key, CacheLoader loader) throws ExecutionExcept promote(ok, now); return ok.value; } else { - try (ReleasableLock ignored = segment.writeLock.acquire()) { + segment.writeLock.lock(); + try { CompletableFuture> sanity = segment.map == null ? 
null : segment.map.get(key); if (sanity != null && sanity.isCompletedExceptionally()) { segment.map.remove(key); @@ -404,6 +423,8 @@ public V computeIfAbsent(K key, CacheLoader loader) throws ExecutionExcept segment.map = null; } } + } finally { + segment.writeLock.unlock(); } return null; } @@ -461,13 +482,16 @@ private void put(K key, V value, long now) { CacheSegment segment = getCacheSegment(key); Tuple, Entry> tuple = segment.put(key, value, now); boolean replaced = false; - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { if (tuple.v2() != null && tuple.v2().state == State.EXISTING) { if (unlink(tuple.v2())) { replaced = true; } } promote(tuple.v1(), now); + } finally { + lruLock.unlock(); } if (replaced) { removalListener.onRemoval( @@ -479,8 +503,11 @@ private void put(K key, V value, long now) { private void notifyWithInvalidated(CompletableFuture> f) { try { Entry entry = f.get(); - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { delete(entry, RemovalNotification.RemovalReason.INVALIDATED); + } finally { + lruLock.unlock(); } } catch (ExecutionException e) { // ok @@ -521,7 +548,8 @@ public void invalidateAll() { Entry h; boolean[] haveSegmentLock = new boolean[NUMBER_OF_SEGMENTS]; - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { try { for (int i = 0; i < NUMBER_OF_SEGMENTS; i++) { segments[i].segmentLock.writeLock().lock(); @@ -546,6 +574,8 @@ public void invalidateAll() { } } } + } finally { + lruLock.unlock(); } while (h != null) { removalListener.onRemoval(new RemovalNotification<>(h.key, h.value, RemovalNotification.RemovalReason.INVALIDATED)); @@ -558,8 +588,11 @@ public void invalidateAll() { */ public void refresh() { long now = now(); - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { evict(now); + } finally { + lruLock.unlock(); } } @@ -589,7 +622,7 @@ public long weight() { * @return an LRU-ordered {@link Iterable} over the keys in the cache */ public Iterable keys() { - return () -> new Iterator() { + return () -> new Iterator<>() { private final CacheIterator iterator = new CacheIterator(head); @Override @@ -617,7 +650,7 @@ public void remove() { * @return an LRU-ordered {@link Iterable} over the values in the cache */ public Iterable values() { - return () -> new Iterator() { + return () -> new Iterator<>() { private final CacheIterator iterator = new CacheIterator(head); @Override @@ -647,7 +680,8 @@ public void remove() { */ public void forEach(BiConsumer consumer) { for (CacheSegment segment : segments) { - try (ReleasableLock ignored = segment.readLock.acquire()) { + segment.readLock.lock(); + try { if (segment.map == null) { continue; } @@ -661,6 +695,8 @@ public void forEach(BiConsumer consumer) { throw new IllegalStateException(e); } } + } finally { + segment.readLock.unlock(); } } } @@ -692,9 +728,12 @@ public void remove() { if (entry != null) { CacheSegment segment = getCacheSegment(entry.key); segment.remove(entry.key, entry.value, false); - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { current = null; delete(entry, RemovalNotification.RemovalReason.INVALIDATED); + } finally { + lruLock.unlock(); } } } @@ -736,7 +775,8 @@ public long getEvictions() { private void promote(Entry entry, long now) { boolean promoted = true; - try (ReleasableLock ignored = lruLock.acquire()) { + lruLock.lock(); + try { switch (entry.state) { case DELETED -> promoted = false; case EXISTING -> relinkAtHead(entry); @@ -745,6 +785,8 @@ 
private void promote(Entry entry, long now) { if (promoted) { evict(now); } + } finally { + lruLock.unlock(); } } From 6c4e01e331f537eafe82ed576ca3cbe39f175ae8 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 17 Apr 2024 12:09:41 +0300 Subject: [PATCH 069/130] Revert "Muting (#107484)" (#107500) This reverts commit ae23a6f85a29840aa104a99d9fc5ef073e649d07. --- .../org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index fc76ae2c67919..5ca5da555718b 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -395,7 +395,6 @@ public void testILMWaitsForTimeSeriesEndTimeToLapse() throws Exception { }, 30, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103981") @TestLogging(value = "org.elasticsearch.xpack.ilm:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/103981") public void testRollupNonTSIndex() throws Exception { createIndex(index, alias, false); From a94f2b056a44dba3d08a8afdbf28cc5baf785314 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 17 Apr 2024 10:41:26 +0100 Subject: [PATCH 070/130] Always validate node ID on relocation (#107420) Follow-up to complete the change started in #107407, removing the temporary compatibility shim. --- .../elasticsearch/index/shard/IndexShard.java | 27 +++++-------------- .../recovery/RecoverySourceHandler.java | 7 ++++- 2 files changed, 13 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index a52b289493cd6..66427ddcee291 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -757,16 +757,6 @@ public IndexShardState markAsRecovering(String reason, RecoveryState recoverySta private final AtomicBoolean primaryReplicaResyncInProgress = new AtomicBoolean(); - // temporary compatibility shim while adding targetNodeId parameter to dependencies - @Deprecated(forRemoval = true) - public void relocated( - final String targetAllocationId, - final BiConsumer> consumer, - final ActionListener listener - ) throws IllegalIndexShardStateException, IllegalStateException { - relocated(null, targetAllocationId, consumer, listener); - } - /** * Completes the relocation. Operations are blocked and current operations are drained before changing state to relocated. The provided * {@link BiConsumer} is executed after all operations are successfully blocked. 
@@ -868,8 +858,7 @@ public void onFailure(Exception e) { } } - // TODO only nullable temporarily, remove once deprecated relocated() override is removed, see ES-6725 - private void verifyRelocatingState(@Nullable String targetNodeId) { + private void verifyRelocatingState(String targetNodeId) { if (state != IndexShardState.STARTED) { throw new IndexShardNotStartedException(shardId, state); } @@ -883,14 +872,12 @@ private void verifyRelocatingState(@Nullable String targetNodeId) { throw new IllegalIndexShardStateException(shardId, IndexShardState.STARTED, ": shard is no longer relocating " + shardRouting); } - if (targetNodeId != null) { - if (targetNodeId.equals(shardRouting.relocatingNodeId()) == false) { - throw new IllegalIndexShardStateException( - shardId, - IndexShardState.STARTED, - ": shard is no longer relocating to node [" + targetNodeId + "]: " + shardRouting - ); - } + if (Objects.equals(targetNodeId, shardRouting.relocatingNodeId()) == false) { + throw new IllegalIndexShardStateException( + shardId, + IndexShardState.STARTED, + ": shard is no longer relocating to node [" + targetNodeId + "]: " + shardRouting + ); } if (primaryReplicaResyncInProgress.get()) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 618bc847e3a7f..538cfdabef324 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -1287,7 +1287,12 @@ void finalizeRecovery(long targetLocalCheckpoint, long trimAboveSeqNo, ActionLis logger.trace("performing relocation hand-off"); cancellableThreads.execute( // this acquires all IndexShard operation permits and will thus delay new recoveries until it is done - () -> shard.relocated(request.targetAllocationId(), recoveryTarget::handoffPrimaryContext, finalStep) + () -> shard.relocated( + request.targetNode().getId(), + request.targetAllocationId(), + recoveryTarget::handoffPrimaryContext, + finalStep + ) ); /* * if the recovery process fails after disabling primary mode on the source shard, both relocation source and From 2f9e753463909f820493fd191ecc8f8bffb30b29 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 17 Apr 2024 11:49:02 +0100 Subject: [PATCH 071/130] Add setting for max connections to S3 (#107533) The S3 SDK permits changing the maximum number of concurrent connections that it will open, but today there's no way to adjust this setting within Elasticsearch. This commit adds a setting for this parameter. 
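Concretely, the new setting follows the usual repository-s3 client-settings pattern: a node-level value under `s3.client.CLIENT_NAME.max_connections` that individual repositories can then override. A rough usage sketch in the style of the tests added below (it would need to sit in the repository-s3 test package, since `S3ClientSettings` and `S3Service` are package-private; the client name and value here are assumptions for illustration):

```java
import java.util.Map;

import com.amazonaws.ClientConfiguration;

import org.elasticsearch.common.settings.Settings;

// Node-level settings feed S3ClientSettings, which in turn configure the AWS
// SDK client; "default" and 25 are illustrative values, not patch defaults.
Settings nodeSettings = Settings.builder()
    .put("s3.client.default.max_connections", 25)
    .build();
Map<String, S3ClientSettings> clients = S3ClientSettings.load(nodeSettings);
ClientConfiguration sdkConfig = S3Service.buildConfiguration(clients.get("default"));
assert sdkConfig.getMaxConnections() == 25; // the SDK now opens at most 25 concurrent connections
```

Per-repository overrides flow through `S3ClientSettings#refine`, which is why the patch threads the new `max_connections` value through that method as well.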
--- docs/changelog/107533.yaml | 5 +++++ .../snapshot-restore/repository-s3.asciidoc | 4 ++++ .../repositories/s3/S3ClientSettings.java | 18 ++++++++++++++++++ .../repositories/s3/S3RepositoryPlugin.java | 1 + .../repositories/s3/S3Service.java | 1 + .../repositories/s3/S3ClientSettingsTests.java | 17 +++++++++++++++++ 6 files changed, 46 insertions(+) create mode 100644 docs/changelog/107533.yaml diff --git a/docs/changelog/107533.yaml b/docs/changelog/107533.yaml new file mode 100644 index 0000000000000..da95cfd5b312e --- /dev/null +++ b/docs/changelog/107533.yaml @@ -0,0 +1,5 @@ +pr: 107533 +summary: Add setting for max connections to S3 +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 9ee630c37eee2..9fd1724bb0548 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -159,6 +159,10 @@ settings belong in the `elasticsearch.yml` file. of data over an established, open connection to the repository before it closes the connection. The default value is 50 seconds. +`max_connections`:: + + The maximum number of concurrent connections to S3. The default value is `50`. + `max_retries`:: The number of retries to use when an S3 request fails. The default value is diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java index ab322786fcd43..3d36a08473b57 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java @@ -124,6 +124,13 @@ final class S3ClientSettings { key -> Setting.timeSetting(key, TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope) ); + /** The maximum number of concurrent connections to use. */ + static final Setting.AffixSetting MAX_CONNECTIONS_SETTING = Setting.affixKeySetting( + PREFIX, + "max_connections", + key -> Setting.intSetting(key, ClientConfiguration.DEFAULT_MAX_CONNECTIONS, 1, Property.NodeScope) + ); + /** The number of retries to use when an s3 request fails. */ static final Setting.AffixSetting MAX_RETRIES_SETTING = Setting.affixKeySetting( PREFIX, @@ -195,6 +202,9 @@ final class S3ClientSettings { /** The read timeout for the s3 client. */ final int readTimeoutMillis; + /** The maximum number of concurrent connections to use. */ + final int maxConnections; + /** The number of retries to use for the s3 client. 
*/ final int maxRetries; @@ -223,6 +233,7 @@ private S3ClientSettings( String proxyUsername, String proxyPassword, int readTimeoutMillis, + int maxConnections, int maxRetries, boolean throttleRetries, boolean pathStyleAccess, @@ -239,6 +250,7 @@ private S3ClientSettings( this.proxyUsername = proxyUsername; this.proxyPassword = proxyPassword; this.readTimeoutMillis = readTimeoutMillis; + this.maxConnections = maxConnections; this.maxRetries = maxRetries; this.throttleRetries = throttleRetries; this.pathStyleAccess = pathStyleAccess; @@ -268,6 +280,7 @@ S3ClientSettings refine(Settings repositorySettings) { final int newReadTimeoutMillis = Math.toIntExact( getRepoSettingOrDefault(READ_TIMEOUT_SETTING, normalizedSettings, TimeValue.timeValueMillis(readTimeoutMillis)).millis() ); + final int newMaxConnections = getRepoSettingOrDefault(MAX_CONNECTIONS_SETTING, normalizedSettings, maxConnections); final int newMaxRetries = getRepoSettingOrDefault(MAX_RETRIES_SETTING, normalizedSettings, maxRetries); final boolean newThrottleRetries = getRepoSettingOrDefault(USE_THROTTLE_RETRIES_SETTING, normalizedSettings, throttleRetries); final boolean newPathStyleAccess = getRepoSettingOrDefault(USE_PATH_STYLE_ACCESS, normalizedSettings, pathStyleAccess); @@ -290,6 +303,7 @@ S3ClientSettings refine(Settings repositorySettings) { && proxyPort == newProxyPort && proxyScheme == newProxyScheme && newReadTimeoutMillis == readTimeoutMillis + && maxConnections == newMaxConnections && maxRetries == newMaxRetries && newThrottleRetries == throttleRetries && Objects.equals(credentials, newCredentials) @@ -309,6 +323,7 @@ S3ClientSettings refine(Settings repositorySettings) { proxyUsername, proxyPassword, newReadTimeoutMillis, + newMaxConnections, newMaxRetries, newThrottleRetries, newPathStyleAccess, @@ -417,6 +432,7 @@ static S3ClientSettings getClientSettings(final Settings settings, final String proxyUsername.toString(), proxyPassword.toString(), Math.toIntExact(getConfigValue(settings, clientName, READ_TIMEOUT_SETTING).millis()), + getConfigValue(settings, clientName, MAX_CONNECTIONS_SETTING), getConfigValue(settings, clientName, MAX_RETRIES_SETTING), getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING), getConfigValue(settings, clientName, USE_PATH_STYLE_ACCESS), @@ -438,6 +454,7 @@ public boolean equals(final Object o) { final S3ClientSettings that = (S3ClientSettings) o; return proxyPort == that.proxyPort && readTimeoutMillis == that.readTimeoutMillis + && maxConnections == that.maxConnections && maxRetries == that.maxRetries && throttleRetries == that.throttleRetries && Objects.equals(credentials, that.credentials) @@ -465,6 +482,7 @@ public int hashCode() { proxyPassword, readTimeoutMillis, maxRetries, + maxConnections, throttleRetries, disableChunkedEncoding, region, diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 26047c3b416a7..1092b76001cb3 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -125,6 +125,7 @@ public List> getSettings() { S3ClientSettings.PROXY_USERNAME_SETTING, S3ClientSettings.PROXY_PASSWORD_SETTING, S3ClientSettings.READ_TIMEOUT_SETTING, + S3ClientSettings.MAX_CONNECTIONS_SETTING, S3ClientSettings.MAX_RETRIES_SETTING, S3ClientSettings.USE_THROTTLE_RETRIES_SETTING, 
S3ClientSettings.USE_PATH_STYLE_ACCESS, diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index c8a7cc12a90f4..4a1308fa59419 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -242,6 +242,7 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { clientConfiguration.setSignerOverride(clientSettings.signerOverride); } + clientConfiguration.setMaxConnections(clientSettings.maxConnections); clientConfiguration.setMaxErrorRetry(clientSettings.maxRetries); clientConfiguration.setUseThrottleRetries(clientSettings.throttleRetries); clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java index 31bfd3a5e157f..c8aaa2a5e0a4b 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -42,6 +42,7 @@ public void testThereIsADefaultClientByDefault() { assertThat(defaultSettings.proxyUsername, is(emptyString())); assertThat(defaultSettings.proxyPassword, is(emptyString())); assertThat(defaultSettings.readTimeoutMillis, is(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT)); + assertThat(defaultSettings.maxConnections, is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS)); assertThat(defaultSettings.maxRetries, is(ClientConfiguration.DEFAULT_RETRY_POLICY.getMaxErrorRetry())); assertThat(defaultSettings.throttleRetries, is(ClientConfiguration.DEFAULT_THROTTLE_RETRIES)); } @@ -197,4 +198,20 @@ public void testSignerOverrideCanBeSet() { ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); assertThat(configuration.getSignerOverride(), is(signerOverride)); } + + public void testMaxConnectionsCanBeSet() { + final int maxConnections = between(1, 100); + final Map settings = S3ClientSettings.load( + Settings.builder().put("s3.client.other.max_connections", maxConnections).build() + ); + assertThat(settings.get("default").maxConnections, is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS)); + assertThat(settings.get("other").maxConnections, is(maxConnections)); + ClientConfiguration defaultConfiguration = S3Service.buildConfiguration(settings.get("default")); + assertThat(defaultConfiguration.getMaxConnections(), is(ClientConfiguration.DEFAULT_MAX_CONNECTIONS)); + ClientConfiguration configuration = S3Service.buildConfiguration(settings.get("other")); + assertThat(configuration.getMaxConnections(), is(maxConnections)); + + // the default appears in the docs so let's make sure it doesn't change: + assertEquals(50, ClientConfiguration.DEFAULT_MAX_CONNECTIONS); + } } From 9b91d9c072c9fb6acc7772239117808cba1c7682 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 17 Apr 2024 14:15:48 +0200 Subject: [PATCH 072/130] Adjust exception checking for geoip downloader error logging. 
(#107550) Sometimes an exception is wrapped multiple times and then these logs are being emitted: ``` org.elasticsearch.transport.RemoteTransportException: [es-es-index-64c4d7dcd-4d7qb][10.2.58.152:9300][cluster:admin/persistent/start] Caused by: org.elasticsearch.transport.RemoteTransportException: [es-es-index-64c4d7dcd-j7s7v][10.2.9.216:9300][cluster:admin/persistent/start] Caused by: org.elasticsearch.ResourceAlreadyExistsException: task with id {geoip-downloader} already exist at org.elasticsearch.persistent.PersistentTasksClusterService$1.execute(PersistentTasksClusterService.java:120) at org.elasticsearch.cluster.service.MasterService$UnbatchedExecutor.execute(MasterService.java:550) at org.elasticsearch.cluster.service.MasterService.innerExecuteTasks(MasterService.java:1039) at org.elasticsearch.cluster.service.MasterService.executeTasks(MasterService.java:1004) at org.elasticsearch.cluster.service.MasterService.executeAndPublishBatch(MasterService.java:232) at org.elasticsearch.cluster.service.MasterService$BatchingTaskQueue$Processor.lambda$run$2(MasterService.java:1645) at org.elasticsearch.action.ActionListener.run(ActionListener.java:356) at org.elasticsearch.cluster.service.MasterService$BatchingTaskQueue$Processor.run(MasterService.java:1642) at org.elasticsearch.cluster.service.MasterService$5.lambda$doRun$0(MasterService.java:1237) at org.elasticsearch.action.ActionListener.run(ActionListener.java:356) at org.elasticsearch.cluster.service.MasterService$5.doRun(MasterService.java:1216) at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:984) at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.lang.Thread.run(Thread.java:1583) ``` In this case the real cause is `ResourceAlreadyExistsException`, which shouldn't be logged as an error. Adjusted the exception cause checking to take into account that an exception may be wrapped twice by a `RemoteTransportException`. --- .../ingest/geoip/GeoIpDownloaderTaskExecutor.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 299e55d4d60a8..c04dffe82b3cf 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -347,7 +348,7 @@ private void startTask(Runnable onFailure) { new GeoIpTaskParams(), null, ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { - Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + Throwable t = e instanceof RemoteTransportException ?
ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceAlreadyExistsException == false) { logger.error("failed to create geoip downloader task", e); onFailure.run(); @@ -360,7 +361,7 @@ private void stopTask(Runnable onFailure) { ActionListener> listener = ActionListener.wrap( r -> logger.debug("Stopped geoip downloader task"), e -> { - Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + Throwable t = e instanceof RemoteTransportException ? ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceNotFoundException == false) { logger.error("failed to remove geoip downloader task", e); onFailure.run(); @@ -373,7 +374,7 @@ private void stopTask(Runnable onFailure) { // regardless of whether DATABASES_INDEX is an alias, resolve it to a concrete index Index databasesIndex = databasesAbstraction.getWriteIndex(); client.admin().indices().prepareDelete(databasesIndex.getName()).execute(ActionListener.wrap(rr -> {}, e -> { - Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + Throwable t = e instanceof RemoteTransportException ? ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceNotFoundException == false) { logger.warn("failed to remove " + databasesIndex, e); } From 33a71e3289cbbc4984d42d5c2f26cfccf65cc44e Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 17 Apr 2024 14:37:07 +0200 Subject: [PATCH 073/130] [DOCS] Refactor book-scoped variables in `docs/reference/index.asciidoc` (#107413) * Remove `es-test-dir` book-scoped variable * Remove `plugins-examples-dir` book-scoped variable * Remove `:dependencies-dir:` and `:xes-repo-dir:` book-scoped variables - In `index.asciidoc`, two variables (`:dependencies-dir:` and `:xes-repo-dir:`) were removed. 
- In `sql/index.asciidoc`, the `:sql-tests:` path was updated to fuller path - In `esql/index.asciidoc`, the `:esql-tests:` path was updated idem * Replace `es-repo-dir` with `es-ref-dir` * Move `:include-xpack: true` to few files that use it, remove from index.asciidoc --- .../RestTestsFromDocSnippetTaskSpec.groovy | 6 +- .../categorize-text-aggregation.asciidoc | 6 +- .../inference-bucket-aggregation.asciidoc | 8 +- docs/reference/alias.asciidoc | 2 +- .../stemmer-override-tokenfilter.asciidoc | 2 +- docs/reference/api-conventions.asciidoc | 8 +- docs/reference/cat/alias.asciidoc | 14 +- docs/reference/cat/allocation.asciidoc | 18 +- docs/reference/cat/anomaly-detectors.asciidoc | 112 ++--- .../cat/component-templates.asciidoc | 14 +- docs/reference/cat/count.asciidoc | 10 +- docs/reference/cat/datafeeds.asciidoc | 40 +- .../reference/cat/dataframeanalytics.asciidoc | 20 +- docs/reference/cat/fielddata.asciidoc | 12 +- docs/reference/cat/health.asciidoc | 12 +- docs/reference/cat/indices.asciidoc | 20 +- docs/reference/cat/master.asciidoc | 14 +- docs/reference/cat/nodeattrs.asciidoc | 14 +- docs/reference/cat/nodes.asciidoc | 18 +- docs/reference/cat/pending_tasks.asciidoc | 16 +- docs/reference/cat/plugins.asciidoc | 14 +- docs/reference/cat/recovery.asciidoc | 22 +- docs/reference/cat/repositories.asciidoc | 14 +- docs/reference/cat/segments.asciidoc | 30 +- docs/reference/cat/shards.asciidoc | 16 +- docs/reference/cat/snapshots.asciidoc | 14 +- docs/reference/cat/tasks.asciidoc | 16 +- docs/reference/cat/templates.asciidoc | 14 +- docs/reference/cat/thread_pool.asciidoc | 16 +- docs/reference/cat/trainedmodel.asciidoc | 14 +- docs/reference/cat/transforms.asciidoc | 74 +-- .../delete-auto-follow-pattern.asciidoc | 2 +- .../get-auto-follow-pattern.asciidoc | 2 +- .../pause-auto-follow-pattern.asciidoc | 2 +- .../put-auto-follow-pattern.asciidoc | 2 +- .../resume-auto-follow-pattern.asciidoc | 2 +- .../ccr/apis/follow/get-follow-info.asciidoc | 2 +- .../apis/follow/post-pause-follow.asciidoc | 2 +- .../apis/follow/post-resume-follow.asciidoc | 2 +- .../ccr/apis/follow/post-unfollow.asciidoc | 2 +- .../ccr/apis/follow/put-follow.asciidoc | 2 +- .../reference/ccr/apis/get-ccr-stats.asciidoc | 2 +- docs/reference/ccr/getting-started.asciidoc | 2 +- docs/reference/cluster/get-settings.asciidoc | 4 +- docs/reference/cluster/health.asciidoc | 6 +- .../cluster/nodes-hot-threads.asciidoc | 4 +- docs/reference/cluster/nodes-info.asciidoc | 6 +- docs/reference/cluster/nodes-stats.asciidoc | 22 +- docs/reference/cluster/nodes-usage.asciidoc | 4 +- docs/reference/cluster/pending.asciidoc | 4 +- .../cluster/prevalidate-node-removal.asciidoc | 2 +- docs/reference/cluster/reroute.asciidoc | 2 +- docs/reference/cluster/state.asciidoc | 6 +- docs/reference/cluster/stats.asciidoc | 4 +- docs/reference/cluster/tasks.asciidoc | 14 +- .../cluster/update-desired-nodes.asciidoc | 2 +- .../cluster/update-settings.asciidoc | 6 +- .../data-streams/data-stream-apis.asciidoc | 32 +- .../lifecycle/apis/delete-lifecycle.asciidoc | 2 +- .../lifecycle/apis/explain-lifecycle.asciidoc | 2 +- .../apis/get-global-retention.asciidoc | 4 +- .../lifecycle/apis/get-lifecycle.asciidoc | 2 +- .../lifecycle/apis/put-lifecycle.asciidoc | 2 +- .../set-up-a-data-stream.asciidoc | 2 +- docs/reference/docs/bulk.asciidoc | 42 +- docs/reference/docs/delete-by-query.asciidoc | 44 +- docs/reference/docs/delete.asciidoc | 14 +- docs/reference/docs/get.asciidoc | 18 +- docs/reference/docs/index_.asciidoc | 18 +- 
docs/reference/docs/multi-get.asciidoc | 14 +- .../reference/docs/multi-termvectors.asciidoc | 22 +- docs/reference/docs/reindex.asciidoc | 12 +- docs/reference/docs/termvectors.asciidoc | 22 +- docs/reference/docs/update-by-query.asciidoc | 46 +- docs/reference/docs/update.asciidoc | 12 +- docs/reference/eql/eql-search-api.asciidoc | 6 +- docs/reference/eql/eql.asciidoc | 4 +- .../esql/esql-across-clusters.asciidoc | 10 +- docs/reference/esql/esql-enrich-data.asciidoc | 4 +- docs/reference/esql/esql-get-started.asciidoc | 6 +- .../esql/esql-index-options.asciidoc | 4 +- docs/reference/esql/index.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 6 +- docs/reference/how-to/disk-usage.asciidoc | 2 +- docs/reference/how-to/indexing-speed.asciidoc | 2 +- docs/reference/how-to/knn-search.asciidoc | 2 +- ...lasticsearch-for-time-series-data.asciidoc | 12 +- .../ilm/apis/delete-lifecycle.asciidoc | 2 +- docs/reference/ilm/apis/explain.asciidoc | 2 +- .../reference/ilm/apis/get-lifecycle.asciidoc | 2 +- docs/reference/ilm/apis/get-status.asciidoc | 2 +- docs/reference/ilm/apis/move-to-step.asciidoc | 2 +- .../reference/ilm/apis/put-lifecycle.asciidoc | 2 +- .../apis/remove-policy-from-index.asciidoc | 2 +- docs/reference/ilm/apis/retry-policy.asciidoc | 2 +- docs/reference/ilm/apis/start.asciidoc | 2 +- docs/reference/ilm/apis/stop.asciidoc | 2 +- docs/reference/index-modules/blocks.asciidoc | 10 +- docs/reference/index.asciidoc | 7 - docs/reference/indices/add-alias.asciidoc | 2 +- docs/reference/indices/alias-exists.asciidoc | 4 +- docs/reference/indices/aliases.asciidoc | 2 +- .../indices/apis/reload-analyzers.asciidoc | 6 +- docs/reference/indices/clearcache.asciidoc | 6 +- docs/reference/indices/clone-index.asciidoc | 10 +- docs/reference/indices/close.asciidoc | 14 +- docs/reference/indices/create-index.asciidoc | 8 +- .../indices/dangling-index-delete.asciidoc | 4 +- .../indices/dangling-index-import.asciidoc | 4 +- .../indices/data-stream-stats.asciidoc | 2 +- docs/reference/indices/delete-alias.asciidoc | 2 +- .../delete-component-template.asciidoc | 4 +- .../indices/delete-data-stream.asciidoc | 2 +- .../indices/delete-index-template-v1.asciidoc | 2 +- .../indices/delete-index-template.asciidoc | 4 +- docs/reference/indices/delete-index.asciidoc | 8 +- docs/reference/indices/diskusage.asciidoc | 8 +- .../indices/downsample-data-stream.asciidoc | 2 +- .../indices/field-usage-stats.asciidoc | 12 +- docs/reference/indices/flush.asciidoc | 6 +- docs/reference/indices/forcemerge.asciidoc | 6 +- docs/reference/indices/get-alias.asciidoc | 6 +- .../indices/get-data-stream.asciidoc | 2 +- .../indices/get-field-mapping.asciidoc | 6 +- .../indices/get-index-template-v1.asciidoc | 8 +- docs/reference/indices/get-index.asciidoc | 12 +- docs/reference/indices/get-mapping.asciidoc | 10 +- docs/reference/indices/get-settings.asciidoc | 14 +- .../indices/index-template-exists-v1.asciidoc | 8 +- .../reference/indices/indices-exists.asciidoc | 12 +- docs/reference/indices/open-close.asciidoc | 10 +- .../indices/put-component-template.asciidoc | 10 +- .../indices/put-index-template-v1.asciidoc | 8 +- .../indices/put-index-template.asciidoc | 2 +- docs/reference/indices/put-mapping.asciidoc | 8 +- docs/reference/indices/recovery.asciidoc | 6 +- docs/reference/indices/refresh.asciidoc | 6 +- .../indices/resolve-cluster.asciidoc | 6 +- docs/reference/indices/resolve.asciidoc | 6 +- .../reference/indices/rollover-index.asciidoc | 16 +- docs/reference/indices/segments.asciidoc | 22 +- 
docs/reference/indices/shard-stores.asciidoc | 6 +- docs/reference/indices/shrink-index.asciidoc | 10 +- .../indices/simulate-template.asciidoc | 2 +- docs/reference/indices/split-index.asciidoc | 10 +- docs/reference/indices/stats.asciidoc | 18 +- .../indices/update-settings.asciidoc | 10 +- .../ingest/apis/delete-pipeline.asciidoc | 2 +- .../ingest/apis/get-pipeline.asciidoc | 2 +- .../ingest/apis/put-pipeline.asciidoc | 2 +- docs/reference/ingest/enrich.asciidoc | 4 +- .../ingest/processors/inference.asciidoc | 126 ++--- .../mapping/explicit-mapping.asciidoc | 4 +- .../mapping/mapping-settings-limit.asciidoc | 2 +- .../types/aggregate-metric-double.asciidoc | 4 +- .../mapping/types/dense-vector.asciidoc | 2 +- docs/reference/mapping/types/nested.asciidoc | 4 +- docs/reference/mapping/types/numeric.asciidoc | 6 +- .../migration/apis/deprecation.asciidoc | 2 +- .../migration/apis/feature-migration.asciidoc | 2 +- .../migrate_8_0/rest-api-changes.asciidoc | 2 +- .../anomaly-detection/apis/close-job.asciidoc | 4 +- .../apis/delete-calendar-event.asciidoc | 2 +- .../apis/delete-calendar-job.asciidoc | 4 +- .../apis/delete-calendar.asciidoc | 2 +- .../apis/delete-datafeed.asciidoc | 2 +- .../apis/delete-filter.asciidoc | 2 +- .../apis/delete-forecast.asciidoc | 2 +- .../apis/delete-job.asciidoc | 2 +- .../apis/delete-snapshot.asciidoc | 4 +- .../anomaly-detection/apis/flush-job.asciidoc | 2 +- .../anomaly-detection/apis/forecast.asciidoc | 2 +- .../apis/get-bucket.asciidoc | 14 +- .../apis/get-calendar-event.asciidoc | 4 +- .../apis/get-calendar.asciidoc | 4 +- .../apis/get-category.asciidoc | 4 +- .../apis/get-datafeed-stats.asciidoc | 32 +- .../apis/get-datafeed.asciidoc | 6 +- .../apis/get-filter.asciidoc | 4 +- .../apis/get-influencer.asciidoc | 14 +- ...-job-model-snapshot-upgrade-stats.asciidoc | 18 +- .../apis/get-job-stats.asciidoc | 106 ++-- .../anomaly-detection/apis/get-job.asciidoc | 42 +- .../apis/get-overall-buckets.asciidoc | 8 +- .../apis/get-record.asciidoc | 20 +- .../apis/get-snapshot.asciidoc | 12 +- .../anomaly-detection/apis/open-job.asciidoc | 2 +- .../anomaly-detection/apis/post-data.asciidoc | 2 +- .../apis/preview-datafeed.asciidoc | 2 +- .../apis/put-calendar-job.asciidoc | 4 +- .../apis/put-calendar.asciidoc | 2 +- .../apis/put-datafeed.asciidoc | 40 +- .../apis/put-filter.asciidoc | 2 +- .../anomaly-detection/apis/put-job.asciidoc | 132 ++--- .../anomaly-detection/apis/reset-job.asciidoc | 2 +- .../apis/revert-snapshot.asciidoc | 4 +- .../apis/start-datafeed.asciidoc | 2 +- .../apis/stop-datafeed.asciidoc | 2 +- .../apis/update-datafeed.asciidoc | 38 +- .../apis/update-filter.asciidoc | 2 +- .../apis/update-job.asciidoc | 60 +-- .../apis/update-snapshot.asciidoc | 6 +- .../apis/upgrade-job-model-snapshot.asciidoc | 4 +- .../apis/validate-detector.asciidoc | 36 +- .../ml/common/apis/get-ml-memory.asciidoc | 8 +- .../apis/delete-dfanalytics.asciidoc | 2 +- .../apis/explain-dfanalytics.asciidoc | 2 +- .../apis/get-dfanalytics-stats.asciidoc | 126 ++--- .../apis/get-dfanalytics.asciidoc | 10 +- .../apis/preview-dfanalytics.asciidoc | 2 +- .../apis/put-dfanalytics.asciidoc | 132 ++--- .../apis/start-dfanalytics.asciidoc | 4 +- .../apis/stop-dfanalytics.asciidoc | 6 +- .../apis/update-dfanalytics.asciidoc | 8 +- docs/reference/ml/ml-shared.asciidoc | 8 +- ...ar-trained-model-deployment-cache.asciidoc | 2 +- .../apis/delete-trained-models.asciidoc | 2 +- .../apis/get-trained-models-stats.asciidoc | 20 +- .../apis/get-trained-models.asciidoc | 456 +++++++++--------- 
.../infer-trained-model-deployment.asciidoc | 2 +- .../apis/infer-trained-model.asciidoc | 262 +++++----- ...put-trained-model-definition-part.asciidoc | 2 +- .../put-trained-model-vocabulary.asciidoc | 2 +- .../apis/put-trained-models.asciidoc | 138 +++--- .../start-trained-model-deployment.asciidoc | 4 +- .../stop-trained-model-deployment.asciidoc | 4 +- .../update-trained-model-deployment.asciidoc | 2 +- docs/reference/modules/cluster/misc.asciidoc | 2 +- .../cluster/remote-clusters-api-key.asciidoc | 2 +- .../cluster/remote-clusters-cert.asciidoc | 2 +- .../modules/indices/index_management.asciidoc | 4 +- docs/reference/modules/node.asciidoc | 10 +- docs/reference/query-dsl/knn-query.asciidoc | 2 +- .../query-dsl/script-score-query.asciidoc | 2 +- ...ear-repositories-metering-archive.asciidoc | 4 +- .../apis/get-repositories-metering.asciidoc | 4 +- docs/reference/rest-api/common-parms.asciidoc | 2 +- docs/reference/rest-api/defs.asciidoc | 2 +- docs/reference/rest-api/index.asciidoc | 92 ++-- .../security/disable-user-profile.asciidoc | 2 +- .../security/enable-user-profile.asciidoc | 2 +- .../rest-api/security/query-api-key.asciidoc | 2 +- .../rest-api/security/query-user.asciidoc | 2 +- .../update-user-profile-data.asciidoc | 6 +- docs/reference/rest-api/usage.asciidoc | 2 +- .../reference/rest-api/watcher/start.asciidoc | 2 +- docs/reference/rest-api/watcher/stop.asciidoc | 2 +- .../apis/create-stored-script-api.asciidoc | 2 +- .../apis/delete-stored-script-api.asciidoc | 2 +- .../apis/get-stored-script-api.asciidoc | 2 +- docs/reference/scripting/engine.asciidoc | 2 +- docs/reference/search/count.asciidoc | 28 +- docs/reference/search/explain.asciidoc | 24 +- docs/reference/search/field-caps.asciidoc | 6 +- docs/reference/search/knn-search.asciidoc | 22 +- .../search/multi-search-template-api.asciidoc | 2 +- docs/reference/search/multi-search.asciidoc | 10 +- docs/reference/search/rank-eval.asciidoc | 6 +- docs/reference/search/retriever.asciidoc | 18 +- docs/reference/search/rrf.asciidoc | 6 +- docs/reference/search/search-shards.asciidoc | 12 +- .../search/search-template-api.asciidoc | 16 +- .../search/search-vector-tile-api.asciidoc | 2 +- .../search-your-data/knn-search.asciidoc | 2 +- .../search-across-clusters.asciidoc | 2 +- .../search-with-synonyms.asciidoc | 2 +- .../semantic-search-inference.asciidoc | 12 +- .../search-your-data/semantic-search.asciidoc | 10 +- docs/reference/search/search.asciidoc | 48 +- docs/reference/search/validate.asciidoc | 20 +- .../apis/mount-snapshot.asciidoc | 2 +- .../apis/node-cache-stats.asciidoc | 2 +- .../settings/monitoring-settings.asciidoc | 50 +- .../settings/security-settings.asciidoc | 184 +++---- docs/reference/settings/ssl-settings.asciidoc | 50 +- docs/reference/setup/add-nodes.asciidoc | 2 +- docs/reference/setup/configuration.asciidoc | 2 +- .../important-settings/path-settings.asciidoc | 6 +- docs/reference/setup/install/deb.asciidoc | 2 + docs/reference/setup/install/rpm.asciidoc | 4 + .../setup/install/targz-start.asciidoc | 2 +- docs/reference/setup/install/targz.asciidoc | 2 + .../setup/install/zip-windows-start.asciidoc | 2 +- .../setup/install/zip-windows.asciidoc | 2 + docs/reference/setup/logging-config.asciidoc | 2 +- docs/reference/setup/restart-cluster.asciidoc | 12 +- .../shutdown/apis/shutdown-delete.asciidoc | 2 +- .../shutdown/apis/shutdown-get.asciidoc | 2 +- .../shutdown/apis/shutdown-put.asciidoc | 2 +- .../slm/apis/slm-get-status.asciidoc | 2 +- docs/reference/slm/apis/slm-put.asciidoc | 4 +- 
docs/reference/slm/apis/slm-start.asciidoc | 2 +- docs/reference/slm/apis/slm-stop.asciidoc | 2 +- .../apis/clean-up-repo-api.asciidoc | 2 +- .../apis/clone-snapshot-api.asciidoc | 2 +- .../apis/create-snapshot-api.asciidoc | 2 +- .../apis/delete-repo-api.asciidoc | 2 +- .../apis/delete-snapshot-api.asciidoc | 2 +- .../apis/get-repo-api.asciidoc | 2 +- .../apis/get-snapshot-api.asciidoc | 2 +- .../apis/get-snapshot-status-api.asciidoc | 2 +- .../apis/put-repo-api.asciidoc | 2 +- .../apis/restore-snapshot-api.asciidoc | 2 +- .../apis/verify-repo-api.asciidoc | 2 +- .../repository-read-only-url.asciidoc | 2 +- .../repository-shared-file-system.asciidoc | 4 +- .../sql/apis/sql-search-api.asciidoc | 2 +- docs/reference/sql/index.asciidoc | 2 +- docs/reference/sql/security.asciidoc | 2 +- docs/reference/tab-widgets/ilm.asciidoc | 2 +- .../tab-widgets/register-fs-repo.asciidoc | 4 +- .../tab-widgets/snapshot-repo.asciidoc | 2 +- .../apis/find-field-structure.asciidoc | 22 +- .../apis/find-message-structure.asciidoc | 22 +- .../apis/find-structure.asciidoc | 30 +- .../transform/apis/delete-transform.asciidoc | 2 +- .../apis/get-transform-stats.asciidoc | 52 +- .../transform/apis/get-transform.asciidoc | 8 +- .../transform/apis/preview-transform.asciidoc | 58 +-- .../transform/apis/put-transform.asciidoc | 68 +-- .../transform/apis/reset-transform.asciidoc | 2 +- .../apis/schedule-now-transform.asciidoc | 2 +- .../transform/apis/stop-transform.asciidoc | 2 +- .../transform/apis/update-transform.asciidoc | 56 +-- .../diagnose-unassigned-shards.asciidoc | 2 +- .../common-issues/high-cpu-usage.asciidoc | 2 +- .../high-jvm-memory-pressure.asciidoc | 2 +- .../troubleshooting/data/add-tier.asciidoc | 2 +- .../data-tiers-mixed-with-node-attr.asciidoc | 2 +- .../data/enable-cluster-allocation.asciidoc | 2 +- .../data/enable-index-allocation.asciidoc | 2 +- .../increase-cluster-shard-limit.asciidoc | 2 +- .../data/increase-shard-limit.asciidoc | 2 +- .../data/increase-tier-capacity.asciidoc | 2 +- .../data/restore-from-snapshot.asciidoc | 2 +- .../troubleshooting/data/start-ilm.asciidoc | 2 +- .../troubleshooting/data/start-slm.asciidoc | 2 +- .../disk/fix-data-node-out-of-disk.asciidoc | 4 +- .../disk/fix-master-node-out-of-disk.asciidoc | 2 +- .../disk/fix-other-node-out-of-disk.asciidoc | 2 +- .../snapshot/add-repository.asciidoc | 2 +- .../repeated-snapshot-failures.asciidoc | 2 +- .../troubleshooting-shards-capacity.asciidoc | 2 +- docs/reference/upgrade.asciidoc | 2 +- 344 files changed, 2331 insertions(+), 2328 deletions(-) diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy index 6e86cba235886..45d3892121952 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/doc/RestTestsFromDocSnippetTaskSpec.groovy @@ -454,11 +454,11 @@ Requires the `manage_ml` cluster privilege. 
This privilege is included in the `<job_id>`:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `<snapshot_id>`:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] [[ml-update-snapshot-request-body]] == {api-request-body-title} The following properties can be updated after the model snapshot is created: `retain`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=retain] [[ml-update-snapshot-example]] diff --git a/docs/reference/aggregations/bucket/categorize-text-aggregation.asciidoc b/docs/reference/aggregations/bucket/categorize-text-aggregation.asciidoc index 8ecd66d6ae7a2..399e6ed87581e 100644 --- a/docs/reference/aggregations/bucket/categorize-text-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/categorize-text-aggregation.asciidoc @@ -40,15 +40,15 @@ is an object it has the following properties: ===== `char_filter`:::: (array of strings or objects) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=char-filter] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=char-filter] `tokenizer`:::: (string or object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=tokenizer] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=tokenizer] `filter`:::: (array of strings or objects) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter] ===== `categorization_filters`:: diff --git a/docs/reference/aggregations/pipeline/inference-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/inference-bucket-aggregation.asciidoc index 911a123ded28e..064881925e8d8 100644 --- a/docs/reference/aggregations/pipeline/inference-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/inference-bucket-aggregation.asciidoc @@ -65,7 +65,7 @@
`num_top_feature_importance_values`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] [discrete] [[inference-agg-classification-opt]] @@ -73,15 +73,15 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num `num_top_classes`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `num_top_feature_importance_values`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] `prediction_field_type`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] [[inference-bucket-agg-example]] diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index 5b30501ed7c9d..e5c2db65778d8 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -308,7 +308,7 @@ POST _aliases ---- // TEST[s/^/PUT _data_stream\/logs-nginx.access-prod\nPUT _data_stream\/logs-my_app-default\n/] -include::{es-repo-dir}/indices/aliases.asciidoc[tag=write-index-defaults] +include::{es-ref-dir}/indices/aliases.asciidoc[tag=write-index-defaults] TIP: We recommend using data streams to store append-only time series data. If you need to update or delete existing time series data, you can perform update or delete operations diff --git a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc index 99cbe695f8f9c..7d070a94cb0d1 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-override-tokenfilter.asciidoc @@ -48,7 +48,7 @@ Where the file looks like: [source,stemmer_override] -------------------------------------------------- -include::{es-test-dir}/cluster/config/analysis/stemmer_override.txt[] +include::{elasticsearch-root}/docs/src/test/cluster/config/analysis/stemmer_override.txt[] -------------------------------------------------- You can also define the overrides rules inline: diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 1a63af19b0a33..25881b707d724 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -205,18 +205,18 @@ on a remote cluster is currently not supported. 
For example, this will throw an Multi-target APIs that can target indices support the following query string parameters: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] The defaults settings for the above parameters depend on the API being used. Some multi-target APIs that can target indices also support the following query string parameter: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] NOTE: APIs with a single target, such as the <>, do not support multi-target syntax. diff --git a/docs/reference/cat/alias.asciidoc b/docs/reference/cat/alias.asciidoc index e7329d05534fc..b6459eaa93b82 100644 --- a/docs/reference/cat/alias.asciidoc +++ b/docs/reference/cat/alias.asciidoc @@ -39,19 +39,19 @@ wildcards (`*`). To retrieve all aliases, omit this parameter or use `*` or [[cat-alias-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] [[cat-alias-api-example]] ==== {api-examples-title} diff --git a/docs/reference/cat/allocation.asciidoc b/docs/reference/cat/allocation.asciidoc index 7bab1926cff09..0891406d1be4b 100644 --- a/docs/reference/cat/allocation.asciidoc +++ b/docs/reference/cat/allocation.asciidoc @@ -30,26 +30,26 @@ and their disk space. 
[[cat-allocation-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] [[cat-allocation-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-allocation-api-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/cat/anomaly-detectors.asciidoc b/docs/reference/cat/anomaly-detectors.asciidoc index 30272e67286cc..607a88d1e1a5c 100644 --- a/docs/reference/cat/anomaly-detectors.asciidoc +++ b/docs/reference/cat/anomaly-detectors.asciidoc @@ -43,20 +43,20 @@ For more information about {anomaly-detect}, see ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[cat-anomaly-detectors-query-params]] ==== {api-query-parms-title} `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + If you do not specify which columns to include, the API returns the default columns. If you explicitly specify one or more columns, it returns only the @@ -65,75 +65,75 @@ specified columns. 
Valid columns are: `assignment_explanation`, `ae`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-anomaly-jobs] `buckets.count`, `bc`, `bucketsCount`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count-anomaly-jobs] `buckets.time.exp_avg`, `btea`, `bucketsTimeExpAvg`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average] `buckets.time.exp_avg_hour`, `bteah`, `bucketsTimeExpAvgHour`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average-hour] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average-hour] `buckets.time.max`, `btmax`, `bucketsTimeMax`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-maximum] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-maximum] `buckets.time.min`, `btmin`, `bucketsTimeMin`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-minimum] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-minimum] `buckets.time.total`, `btt`, `bucketsTimeTotal`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-total] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-total] `data.buckets`, `db`, `dataBuckets`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count] `data.earliest_record`, `der`, `dataEarliestRecord`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=earliest-record-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=earliest-record-timestamp] `data.empty_buckets`, `deb`, `dataEmptyBuckets`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=empty-bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=empty-bucket-count] `data.input_bytes`, `dib`, `dataInputBytes`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-bytes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-bytes] `data.input_fields`, `dif`, `dataInputFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-field-count] `data.input_records`, `dir`, `dataInputRecords`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-record-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-record-count] `data.invalid_dates`, `did`, `dataInvalidDates`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=invalid-date-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=invalid-date-count] `data.last`, `dl`, `dataLast`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=last-data-time] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=last-data-time] `data.last_empty_bucket`, `dleb`, `dataLastEmptyBucket`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-empty-bucket-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-empty-bucket-timestamp] `data.last_sparse_bucket`, `dlsb`, `dataLastSparseBucket`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-sparse-record-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-sparse-record-timestamp] `data.latest_record`, `dlr`, `dataLatestRecord`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-record-timestamp] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-record-timestamp] `data.missing_fields`, `dmf`, `dataMissingFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=missing-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=missing-field-count] `data.out_of_order_timestamps`, `doot`, `dataOutOfOrderTimestamps`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=out-of-order-timestamp-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=out-of-order-timestamp-count] `data.processed_fields`, `dpf`, `dataProcessedFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=processed-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=processed-field-count] `data.processed_records`, `dpr`, `dataProcessedRecords`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=processed-record-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=processed-record-count] `data.sparse_buckets`, `dsb`, `dataSparseBuckets`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=sparse-bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=sparse-bucket-count] `forecasts.memory.avg`, `fmavg`, `forecastsMemoryAvg`::: The average memory usage in bytes for forecasts related to the {anomaly-job}. @@ -177,99 +177,99 @@ The total runtime in milliseconds for forecasts related to the {anomaly-job}. `forecasts.total`, `ft`, `forecastsTotal`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=forecast-total] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=forecast-total] `id`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `model.bucket_allocation_failures`, `mbaf`, `modelBucketAllocationFailures`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-allocation-failures-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-allocation-failures-count] `model.by_fields`, `mbf`, `modelByFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-by-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-by-field-count] `model.bytes`, `mb`, `modelBytes`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-bytes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-bytes] `model.bytes_exceeded`, `mbe`, `modelBytesExceeded`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-bytes-exceeded] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-bytes-exceeded] `model.categorization_status`, `mcs`, `modelCategorizationStatus`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-status] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-status] `model.categorized_doc_count`, `mcdc`, `modelCategorizedDocCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorized-doc-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorized-doc-count] `model.dead_category_count`, `mdcc`, `modelDeadCategoryCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dead-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dead-category-count] `model.failed_category_count`, `mdcc`, `modelFailedCategoryCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] `model.frequent_category_count`, `mfcc`, `modelFrequentCategoryCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequent-category-count] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequent-category-count] `model.log_time`, `mlt`, `modelLogTime`::: The timestamp when the model stats were gathered, according to server time. `model.memory_limit`, `mml`, `modelMemoryLimit`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-anomaly-jobs] `model.memory_status`, `mms`, `modelMemoryStatus`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-status] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-status] `model.over_fields`, `mof`, `modelOverFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-over-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-over-field-count] `model.partition_fields`, `mpf`, `modelPartitionFields`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-partition-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-partition-field-count] `model.rare_category_count`, `mrcc`, `modelRareCategoryCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=rare-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=rare-category-count] `model.timestamp`, `mt`, `modelTimestamp`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-timestamp] `model.total_category_count`, `mtcc`, `modelTotalCategoryCount`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-category-count] `node.address`, `na`, `nodeAddress`::: The network address of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-jobs] `node.ephemeral_id`, `ne`, `nodeEphemeralId`::: The ephemeral ID of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-jobs] `node.id`, `ni`, `nodeId`::: The unique identifier of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-jobs] `node.name`, `nn`, `nodeName`::: The node name. 
+ -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-jobs] `opened_time`, `ot`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=open-time] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=open-time] `state`, `s`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=state-anomaly-job] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=state-anomaly-job] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-anomaly-detectors-example]] ==== {api-examples-title} diff --git a/docs/reference/cat/component-templates.asciidoc b/docs/reference/cat/component-templates.asciidoc index e642899e7045c..596c86befd1b7 100644 --- a/docs/reference/cat/component-templates.asciidoc +++ b/docs/reference/cat/component-templates.asciidoc @@ -40,19 +40,19 @@ wildcard expressions. If omitted, all component templates are returned. [[cat-component-templates-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-component-templates-api-example]] diff --git a/docs/reference/cat/count.asciidoc b/docs/reference/cat/count.asciidoc index 5fdae8768e1ef..37e602c759020 100644 --- a/docs/reference/cat/count.asciidoc +++ b/docs/reference/cat/count.asciidoc @@ -43,15 +43,15 @@ and indices, omit this parameter or use `*` or `_all`. 
[[cat-count-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-count-api-example]] diff --git a/docs/reference/cat/datafeeds.asciidoc b/docs/reference/cat/datafeeds.asciidoc index 82a7853a569c3..9b6481191e59d 100644 --- a/docs/reference/cat/datafeeds.asciidoc +++ b/docs/reference/cat/datafeeds.asciidoc @@ -42,18 +42,18 @@ NOTE: This API returns a maximum of 10,000 jobs. ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] [[cat-datafeeds-query-params]] ==== {api-query-parms-title} `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + If you do not specify which columns to include, the API returns the default columns. If you explicitly specify one or more columns, it returns only the @@ -62,60 +62,60 @@ specified columns. Valid columns are: `assignment_explanation`, `ae`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds] `buckets.count`, `bc`, `bucketsCount`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count] `id`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] `node.address`, `na`, `nodeAddress`::: The network address of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] `node.ephemeral_id`, `ne`, `nodeEphemeralId`::: The ephemeral ID of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] `node.id`, `ni`, `nodeId`::: The unique identifier of the node. + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] `node.name`, `nn`, `nodeName`::: The node name. 
+ -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds] `search.bucket_avg`, `sba`, `searchBucketAvg`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-bucket-avg] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-bucket-avg] `search.count`, `sc`, `searchCount`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-count] `search.exp_avg_hour`, `seah`, `searchExpAvgHour`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour] `search.time`, `st`, `searchTime`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-time] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-time] `state`, `s`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=state-datafeed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=state-datafeed] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-datafeeds-example]] ==== {api-examples-title} diff --git a/docs/reference/cat/dataframeanalytics.asciidoc b/docs/reference/cat/dataframeanalytics.asciidoc index c39146ab2266d..4c236ecf61ffc 100644 --- a/docs/reference/cat/dataframeanalytics.asciidoc +++ b/docs/reference/cat/dataframeanalytics.asciidoc @@ -46,15 +46,15 @@ For more information, see <> and {ml-docs-setup-privileges} ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] [[cat-dfanalytics-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + If you do not specify which columns to include, the API returns the default columns. If you explicitly specify one or more columns, it returns only the @@ -63,14 +63,14 @@ specified columns. Valid columns are: `assignment_explanation`, `ae`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-dfanalytics] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-dfanalytics] `create_time`, `ct`, `createTime`::: (Default) The time when the {dfanalytics-job} was created. `description`, `d`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=description-dfa] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=description-dfa] `dest_index`, `di`, `destIndex`::: Name of the destination index. @@ -80,7 +80,7 @@ Contains messages about the reason why a {dfanalytics-job} failed. 
`id`::: (Default) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] `model_memory_limit`, `mml`, `modelMemoryLimit`::: The approximate maximum amount of memory resources that are permitted for the @@ -115,13 +115,13 @@ The type of analysis that the {dfanalytics-job} performs. `version`, `v`::: The {es} version number in which the {dfanalytics-job} was created. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-dfanalytics-example]] diff --git a/docs/reference/cat/fielddata.asciidoc b/docs/reference/cat/fielddata.asciidoc index 55b9e1f562774..376ef1d97057c 100644 --- a/docs/reference/cat/fielddata.asciidoc +++ b/docs/reference/cat/fielddata.asciidoc @@ -39,17 +39,17 @@ information. [[cat-fielddata-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-fielddata-api-example]] diff --git a/docs/reference/cat/health.asciidoc b/docs/reference/cat/health.asciidoc index 84661ae8ef320..04a11699d3ecf 100644 --- a/docs/reference/cat/health.asciidoc +++ b/docs/reference/cat/health.asciidoc @@ -51,22 +51,22 @@ over a longer period of time. See <>. [[cat-health-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] `ts` (timestamps):: (Optional, Boolean) If `true`, returns `HH:MM:SS` and {wikipedia}/Unix_time[Unix `epoch`] timestamps. Defaults to `true`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-health-api-example]] diff --git a/docs/reference/cat/indices.asciidoc b/docs/reference/cat/indices.asciidoc index 43b10c83264cb..64b90c4f8e353 100644 --- a/docs/reference/cat/indices.asciidoc +++ b/docs/reference/cat/indices.asciidoc @@ -62,11 +62,11 @@ and indices, omit this parameter or use `*` or `_all`. [[cat-indices-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] `health`:: + @@ -81,24 +81,24 @@ are: By default, the response includes indices of any health status. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[pri-flag]] `pri` (primary shards):: (Optional, Boolean) If `true`, the response only includes information from primary shards. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] [[cat-indices-api-example]] diff --git a/docs/reference/cat/master.asciidoc b/docs/reference/cat/master.asciidoc index 151e0ac6516aa..42348fc4939df 100644 --- a/docs/reference/cat/master.asciidoc +++ b/docs/reference/cat/master.asciidoc @@ -29,19 +29,19 @@ and name. 
[[cat-master-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-master-api-example]] diff --git a/docs/reference/cat/nodeattrs.asciidoc b/docs/reference/cat/nodeattrs.asciidoc index d4e07f153da21..2db0c3fc20279 100644 --- a/docs/reference/cat/nodeattrs.asciidoc +++ b/docs/reference/cat/nodeattrs.asciidoc @@ -27,9 +27,9 @@ Returns information about custom node attributes. [[cat-nodeattrs-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + -- If you do not specify which columns to include, the API returns the default columns in the order listed below. If you explicitly specify one or more columns, it only returns the specified columns. @@ -61,15 +61,15 @@ Process ID, such as `13061`. Bound transport port, such as `9300`. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-nodeattrs-api-example]] diff --git a/docs/reference/cat/nodes.asciidoc b/docs/reference/cat/nodes.asciidoc index da1ed532e41fa..bfee57d1daad7 100644 --- a/docs/reference/cat/nodes.asciidoc +++ b/docs/reference/cat/nodes.asciidoc @@ -27,15 +27,15 @@ Returns information about a cluster's nodes. [[cat-nodes-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] `full_id`:: (Optional, Boolean) If `true`, return the full node ID. If `false`, return the shortened node ID. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + -- If you do not specify which columns to include, the API returns the default columns in the order listed below. If you explicitly specify one or more columns, it only returns the specified columns. @@ -328,17 +328,17 @@ Number of mappings, including <> and <> fields. Estimated heap overhead, in bytes, of mappings on this node, which allows for 1KiB of heap for every mapped field. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] [[cat-nodes-api-example]] ==== {api-examples-title} diff --git a/docs/reference/cat/pending_tasks.asciidoc b/docs/reference/cat/pending_tasks.asciidoc index c576df17ced24..5dd6cb0688145 100644 --- a/docs/reference/cat/pending_tasks.asciidoc +++ b/docs/reference/cat/pending_tasks.asciidoc @@ -28,21 +28,21 @@ Returns cluster-level changes that have not yet been executed, similar to the [[cat-pending-tasks-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-pending-tasks-api-example]] diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index 9a83887c484a8..a812556887b74 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -29,19 +29,19 @@ Returns a list of plugins running on each node of a cluster. 
[[cat-plugins-api-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v]


[[cat-plugins-api-example]]
diff --git a/docs/reference/cat/recovery.asciidoc b/docs/reference/cat/recovery.asciidoc
index 76fb66c6703d5..058f4e69ae8e3 100644
--- a/docs/reference/cat/recovery.asciidoc
+++ b/docs/reference/cat/recovery.asciidoc
@@ -39,7 +39,7 @@ The cat recovery API returns information about shard recoveries, both ongoing
and completed. It is a more compact view of the JSON <> API.

-include::{es-repo-dir}/indices/recovery.asciidoc[tag=shard-recovery-desc]
+include::{es-ref-dir}/indices/recovery.asciidoc[tag=shard-recovery-desc]


[[cat-recovery-path-params]]
@@ -53,25 +53,25 @@ and indices, omit this parameter or use `*` or `_all`.
[[cat-recovery-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=active-only]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=active-only]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=detailed]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=detailed]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-query-parm]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-query-parm]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v]


[[cat-recovery-api-example]]
diff --git a/docs/reference/cat/repositories.asciidoc b/docs/reference/cat/repositories.asciidoc
index edeb66b954231..ec2f243c27bec 100644
--- a/docs/reference/cat/repositories.asciidoc
+++ b/docs/reference/cat/repositories.asciidoc
@@ -29,19 +29,19 @@ Returns the <> for a cluster
[[cat-repositories-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-repositories-api-example]] diff --git a/docs/reference/cat/segments.asciidoc b/docs/reference/cat/segments.asciidoc index 900bebdb81685..872af679642d0 100644 --- a/docs/reference/cat/segments.asciidoc +++ b/docs/reference/cat/segments.asciidoc @@ -44,11 +44,11 @@ and indices, omit this parameter or use `*` or `_all`. [[cat-segments-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + -- If you do not specify which columns to include, the API returns the default @@ -71,39 +71,39 @@ Valid columns are: `segment`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment] `generation`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=generation] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=generation] `docs.count`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-count] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-count] `docs.deleted`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted] `size`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-size] `size.memory`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=memory] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=memory] `committed`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=committed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=committed] `searchable`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-search] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-search] `version`:: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-version] `compound`:: (Default) If `true`, the segment is stored in a compound file. This means Lucene @@ -113,11 +113,11 @@ merged all files from the segment in a single file to save file descriptors. ID of the node, such as `k0zy`. 
-- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-segments-api-example]] diff --git a/docs/reference/cat/shards.asciidoc b/docs/reference/cat/shards.asciidoc index c3d21f29bfaf2..74c017d86d8e8 100644 --- a/docs/reference/cat/shards.asciidoc +++ b/docs/reference/cat/shards.asciidoc @@ -44,11 +44,11 @@ and indices, omit this parameter or use `*` or `_all`. [[cat-shards-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + -- If you do not specify which columns to include, the API returns the default @@ -297,15 +297,15 @@ values include: -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-shards-api-example]] diff --git a/docs/reference/cat/snapshots.asciidoc b/docs/reference/cat/snapshots.asciidoc index 059fd58630bcc..820c4b56c783d 100644 --- a/docs/reference/cat/snapshots.asciidoc +++ b/docs/reference/cat/snapshots.asciidoc @@ -46,9 +46,9 @@ If any repository fails during the request, {es} returns an error. [[cat-snapshots-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + -- If you do not specify which columns to include, the API returns the default @@ -107,19 +107,19 @@ units>>. Reason for any snapshot failures. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] `ignore_unavailable`:: (Optional, Boolean) If `true`, the response does not include information from unavailable snapshots. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-snapshots-api-example]] diff --git a/docs/reference/cat/tasks.asciidoc b/docs/reference/cat/tasks.asciidoc index 1258c746bfb11..91d67baa72d70 100644 --- a/docs/reference/cat/tasks.asciidoc +++ b/docs/reference/cat/tasks.asciidoc @@ -41,13 +41,13 @@ of the JSON <> API. [[cat-tasks-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=detailed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=detailed] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] `nodes`:: (Optional, string) @@ -58,17 +58,17 @@ wildcard (`*`) expressions. (Optional, string) Parent task ID used to limit the response. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-tasks-api-response-codes]] ==== {api-response-codes-title} -include::{es-repo-dir}/cluster/tasks.asciidoc[tag=tasks-api-404] +include::{es-ref-dir}/cluster/tasks.asciidoc[tag=tasks-api-404] [[cat-tasks-api-examples]] diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index 61dce67615e1a..bcc8e9e4f5dc4 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -39,19 +39,19 @@ expressions. If omitted, all templates are returned. 
[[cat-templates-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v]


[[cat-templates-api-example]]
diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc
index 244cf643798aa..948ed9a1a7a30 100644
--- a/docs/reference/cat/thread_pool.asciidoc
+++ b/docs/reference/cat/thread_pool.asciidoc
@@ -40,9 +40,9 @@ request. Accepts wildcard expressions.
[[cat-thread-pool-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+
--
If you do not specify which columns to include, the API returns the default columns in the order listed below. If you explicitly specify one or more columns, it only returns the specified columns.
@@ -113,17 +113,17 @@ Type of thread pool. Returned values are `fixed`, `fixed_auto_queue_size`, `direct`
--

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v]


[[cat-thread-pool-api-example]]
diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc
index 6f305be845fc4..74e83525ddfe1 100644
--- a/docs/reference/cat/trainedmodel.asciidoc
+++ b/docs/reference/cat/trainedmodel.asciidoc
@@ -41,11 +41,11 @@ For more information, see <> and {ml-docs-setup-privileges}
[[cat-trained-model-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bytes]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bytes]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h]
+
If you do not specify which columns to include, the API returns the default columns.
If you explicitly specify one or more columns, it returns only the @@ -102,13 +102,13 @@ measuring the computational complexity of the model. `version`, `v`::: The {es} version number in which the trained model was created. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-trained-model-example]] diff --git a/docs/reference/cat/transforms.asciidoc b/docs/reference/cat/transforms.asciidoc index 1723d4ab73b06..0d4e9b691ac5a 100644 --- a/docs/reference/cat/transforms.asciidoc +++ b/docs/reference/cat/transforms.asciidoc @@ -41,22 +41,22 @@ privileges. For more information, see <> and ``:: (Optional, string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard] [[cat-transforms-api-query-params]] ==== {api-query-parms-title} `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=http-format] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=http-format] `from`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from-transforms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from-transforms] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-h] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-h] + If you do not specify which columns to include, the API returns the default columns. If you explicitly specify one or more columns, it returns only the @@ -66,14 +66,14 @@ Valid columns are: `changes_last_detection_time`, `cldt`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-changes-last-detected-at] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-changes-last-detected-at] `checkpoint`, `cp`::: (Default) The sequence number for the checkpoint. `checkpoint_duration_time_exp_avg`, `cdtea`, `checkpointTimeExpAvg`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-checkpoint-duration-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-checkpoint-duration-ms] `checkpoint_progress`, `c`, `checkpointProgress`::: (Default) @@ -83,106 +83,106 @@ The progress of the next checkpoint that is currently in progress. The time the {transform} was created. `delete_time`, `dtime`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=delete-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=delete-time-ms] `description`, `d`::: The description of the {transform}. 
`dest_index`, `di`, `destIndex`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-index] `documents_deleted`, `docd`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted-transform] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted-transform] `documents_indexed`, `doci`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-indexed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-indexed] `docs_per_second`, `dps`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second] `documents_processed`, `docp`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-processed] `frequency`, `f`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=frequency] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=frequency] `id`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id] `index_failure`, `if`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-failures] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-failures] `index_time`, `itime`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-time-ms] `index_total`, `it`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-total] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-total] `indexed_documents_exp_avg`, `idea`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-indexed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-indexed] `last_search_time`, `lst`, `lastSearchTime`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-last-search-time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-last-search-time] `max_page_search_size`, `mpsz`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size] `pages_processed`, `pp`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pages-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pages-processed] `pipeline`, `p`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline] `processed_documents_exp_avg`, `pdea`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-processed] `processing_time`, `pt`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=processing-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=processing-time-ms] `reason`, `r`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=state-transform-reason] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=state-transform-reason] `search_failure`, `sf`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-failures] 
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-failures] `search_time`, `stime`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-time-ms] `search_total`, `st`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-total] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-total] `source_index`, `si`, `sourceIndex`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms] `state`, `s`::: (Default) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=state-transform] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=state-transform] `transform_type`, `tt`::: Indicates the type of {transform}: `batch` or `continuous`. `trigger_count`, `tc`::: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=trigger-count] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=trigger-count] `version`, `v`::: The version of {es} that existed on the node when the {transform} was created. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=help] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=help] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-s] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-s] `size`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=size-transforms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=size-transforms] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=time] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=time] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cat-v] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cat-v] [[cat-transforms-api-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc index a78148388a931..1c72fb8742b93 100644 --- a/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/delete-auto-follow-pattern.asciidoc @@ -57,7 +57,7 @@ This API deletes a configured collection of [[ccr-delete-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-delete-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc index 4fa85d6ee638e..46ef288b05088 100644 --- a/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/get-auto-follow-pattern.asciidoc @@ -75,7 +75,7 @@ This API will return the specified auto-follow pattern collection. 
[[ccr-get-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-get-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc index ed0f242640698..1e64ab813e2ad 100644 --- a/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/pause-auto-follow-pattern.asciidoc @@ -43,7 +43,7 @@ meantime. [[ccr-pause-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-pause-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc index 4b5ff5a5eb930..d08997068f705 100644 --- a/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/put-auto-follow-pattern.asciidoc @@ -74,7 +74,7 @@ the new patterns. [[ccr-put-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-put-auto-follow-pattern-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc index 5028b0f3d4775..04da9b4a35ba0 100644 --- a/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc +++ b/docs/reference/ccr/apis/auto-follow/resume-auto-follow-pattern.asciidoc @@ -38,7 +38,7 @@ have been deleted or closed in the meantime. [[ccr-resume-auto-follow-pattern-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-resume-auto-follow-pattern-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc index fd3d24e41be59..68fd6e210f884 100644 --- a/docs/reference/ccr/apis/follow/get-follow-info.asciidoc +++ b/docs/reference/ccr/apis/follow/get-follow-info.asciidoc @@ -52,7 +52,7 @@ replication options and whether the follower indices are active or paused. [[ccr-get-follow-info-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[ccr-get-follow-info-response-body]] diff --git a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc index 58d5fbb03fa03..a4ab69aba8d84 100644 --- a/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-pause-follow.asciidoc @@ -56,7 +56,7 @@ following task. 
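For the pause-follow docs patched above, a speculative sketch of the request shape; `follower_index` is a placeholder index name, and the call takes no body:

[source,console]
----
POST /follower_index/_ccr/pause_follow
----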
[[ccr-post-pause-follow-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-pause-follow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc index b762f049bde62..47ba51a3fb8a0 100644 --- a/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-resume-follow.asciidoc @@ -69,7 +69,7 @@ returns, the follower index will resume fetching operations from the leader inde [[ccr-post-resume-follow-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-resume-follow-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc index e8ca3526bbc88..b96777b455d3b 100644 --- a/docs/reference/ccr/apis/follow/post-unfollow.asciidoc +++ b/docs/reference/ccr/apis/follow/post-unfollow.asciidoc @@ -63,7 +63,7 @@ irreversible operation. [[ccr-post-unfollow-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-post-unfollow-examples]] ==== {api-examples-title} diff --git a/docs/reference/ccr/apis/follow/put-follow.asciidoc b/docs/reference/ccr/apis/follow/put-follow.asciidoc index 11711432437ec..eb83e2a13dcf1 100644 --- a/docs/reference/ccr/apis/follow/put-follow.asciidoc +++ b/docs/reference/ccr/apis/follow/put-follow.asciidoc @@ -65,7 +65,7 @@ referenced leader index. When this API returns, the follower index exists, and follower shard requires transferring all the remote Lucene segment files to the follower index. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[ccr-put-follow-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index 2917e3f86372b..128df5e47c777 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -56,7 +56,7 @@ shard-level stats as in the <>. `timeout`:: (Optional, time) Controls the amount of time to wait for results. Defaults to unlimited. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[ccr-get-stats-response-body]] diff --git a/docs/reference/ccr/getting-started.asciidoc b/docs/reference/ccr/getting-started.asciidoc index d6c455b510dad..d30cd43a4db5e 100644 --- a/docs/reference/ccr/getting-started.asciidoc +++ b/docs/reference/ccr/getting-started.asciidoc @@ -159,7 +159,7 @@ cluster with cluster alias `leader`. connected to. 
==== -include::{es-repo-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[tag=configure-ccr-privileges] +include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[tag=configure-ccr-privileges] [[ccr-getting-started-follower-index]] ==== Create a follower index to replicate a specific index diff --git a/docs/reference/cluster/get-settings.asciidoc b/docs/reference/cluster/get-settings.asciidoc index 931ebc9759a81..5a9fe81df61c7 100644 --- a/docs/reference/cluster/get-settings.asciidoc +++ b/docs/reference/cluster/get-settings.asciidoc @@ -34,10 +34,10 @@ defined, but can also include the default settings by calling the ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] `include_defaults`:: (Optional, Boolean) If `true`, returns default cluster settings from the local node. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/cluster/health.asciidoc b/docs/reference/cluster/health.asciidoc index 561c092f66a60..3a4058a55ce16 100644 --- a/docs/reference/cluster/health.asciidoc +++ b/docs/reference/cluster/health.asciidoc @@ -61,9 +61,9 @@ To target all data streams and indices in a cluster, omit this parameter or use (Optional, string) Can be one of `cluster`, `indices` or `shards`. Controls the details level of the health information returned. Defaults to `cluster`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `wait_for_active_shards`:: (Optional, string) A number controlling to how many active shards to wait @@ -104,7 +104,7 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] (string) The name of the cluster. `status`:: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cluster-health-status] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cluster-health-status] `timed_out`:: (Boolean) If `false` the response returned within the period of diff --git a/docs/reference/cluster/nodes-hot-threads.asciidoc b/docs/reference/cluster/nodes-hot-threads.asciidoc index 552d41629e1e2..a7ee9604250d9 100644 --- a/docs/reference/cluster/nodes-hot-threads.asciidoc +++ b/docs/reference/cluster/nodes-hot-threads.asciidoc @@ -30,7 +30,7 @@ threads. [[cluster-nodes-hot-threads-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] [[cluster-nodes-hot-threads-api-query-params]] @@ -56,7 +56,7 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] troubleshooting, set this parameter to a large number (e.g. `9999`) to get information about all the threads in the system. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `type`:: (Optional, string) The type to sample. 
Available options are `block`, `cpu`, and diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index 16da5a25d1fbf..8ff7da3a16ad1 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -101,7 +101,7 @@ can also request the metric `_all` to retrieve all metrics, or you can request the metric `_none` to suppress all metrics and retrieve only the identity of the node. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] [[cluster-nodes-info-api-response-body]] ==== {api-response-body-title} @@ -182,9 +182,9 @@ running process: [[cluster-nodes-info-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[cluster-nodes-info-api-example]] diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 07328ba98bcec..a40d1f98cbd51 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -127,31 +127,31 @@ using metrics. * `dense_vector` -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] [[cluster-nodes-stats-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=completion-fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=completion-fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fielddata-fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fielddata-fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=groups] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=groups] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=level] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=level] `types`:: (Optional, string) A comma-separated list of document types for the `indexing` index metric. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-segment-file-sizes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-segment-file-sizes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] [role="child_attributes"] [[cluster-nodes-stats-api-response-body]] @@ -244,11 +244,11 @@ node. 
======= `count`:: (integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-count] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-count] `deleted`:: (integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted] ======= `store`:: diff --git a/docs/reference/cluster/nodes-usage.asciidoc b/docs/reference/cluster/nodes-usage.asciidoc index c62c42a572284..6c53919bcfbbc 100644 --- a/docs/reference/cluster/nodes-usage.asciidoc +++ b/docs/reference/cluster/nodes-usage.asciidoc @@ -48,13 +48,13 @@ of features for each node. All the nodes selective options are explained that action has been called on the node. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] [[cluster-nodes-usage-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[cluster-nodes-usage-api-example]] diff --git a/docs/reference/cluster/pending.asciidoc b/docs/reference/cluster/pending.asciidoc index b82bdd8e022f4..3e87234c7d26c 100644 --- a/docs/reference/cluster/pending.asciidoc +++ b/docs/reference/cluster/pending.asciidoc @@ -34,9 +34,9 @@ might be reported by both task api and pending cluster tasks API. [[cluster-pending-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[cluster-pending-api-response-body]] diff --git a/docs/reference/cluster/prevalidate-node-removal.asciidoc b/docs/reference/cluster/prevalidate-node-removal.asciidoc index d7f0ed64d6c0f..16bf28c586687 100644 --- a/docs/reference/cluster/prevalidate-node-removal.asciidoc +++ b/docs/reference/cluster/prevalidate-node-removal.asciidoc @@ -34,7 +34,7 @@ Note that if the prevalidation result for a set of nodes returns `true` (i.e. it [[prevalidate-node-removal-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `names`:: (Optional, string) Comma-separated list of node names. diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index d88df96b50008..b4e4809ae73b4 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -112,7 +112,7 @@ query parameter, which will attempt a single retry round for these shards. (Optional, Boolean) If `true`, then retries allocation of shards that are blocked due to too many subsequent allocation failures. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[cluster-reroute-api-request-body]] diff --git a/docs/reference/cluster/state.asciidoc b/docs/reference/cluster/state.asciidoc index 2836757a94fb9..fcb2f5f2f5dcd 100644 --- a/docs/reference/cluster/state.asciidoc +++ b/docs/reference/cluster/state.asciidoc @@ -115,15 +115,15 @@ Defaults to `true`. that are open, closed or both. Available options: `open`, `closed`, `none`, `all`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] `ignore_unavailable`:: (Optional, Boolean) If `true`, unavailable indices (missing or closed) will be ignored. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `wait_for_metadata_version`:: (Optional, integer) Waits for the metadata version to be equal or greater diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index bdd3e166c22d6..26b3553c3c17f 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -33,7 +33,7 @@ memory usage) and information about the current nodes that form the cluster ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-filter] [[cluster-stats-api-query-params]] @@ -86,7 +86,7 @@ Unique identifier for the cluster. the last time the cluster statistics were refreshed. `status`:: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=cluster-health-status] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=cluster-health-status] + See <>. diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index 75a6352196b29..0ffd700957506 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -32,23 +32,23 @@ on one or more nodes in the cluster. [[tasks-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=task-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=task-id] [[tasks-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=actions] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=actions] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=detailed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=detailed] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=group-by] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=group-by] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=nodes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=nodes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=parent-task-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=parent-task-id] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `wait_for_completion`:: (Optional, Boolean) If `true`, the request blocks until all found tasks are complete. 
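As context for the task management hunk above, a minimal sketch of listing in-flight tasks with the query parameters the included tags document (`actions`, `detailed`, `group_by`); the `*search` filter value is illustrative:

[source,console]
----
GET /_tasks?actions=*search&detailed=true&group_by=parents
----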
diff --git a/docs/reference/cluster/update-desired-nodes.asciidoc b/docs/reference/cluster/update-desired-nodes.asciidoc index b7bbb8b3b7f4f..c72a2b53208e5 100644 --- a/docs/reference/cluster/update-desired-nodes.asciidoc +++ b/docs/reference/cluster/update-desired-nodes.asciidoc @@ -47,7 +47,7 @@ DELETE /_internal/desired_nodes [[update-desired-nodes-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `dry_run`:: (Optional, Boolean) If `true`, then the request simulates the update and diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index c1a4397ee369e..ca3d100e31e06 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -22,18 +22,18 @@ Configures <>. ==== {api-description-title} :strip-api-link: true -include::{es-repo-dir}/setup/configuration.asciidoc[tag=cluster-setting-precedence] +include::{es-ref-dir}/setup/configuration.asciidoc[tag=cluster-setting-precedence] [[cluster-update-settings-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] `include_defaults`:: (Optional, Boolean) If `true`, returns all default cluster settings. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[cluster-update-settings-api-example]] diff --git a/docs/reference/data-streams/data-stream-apis.asciidoc b/docs/reference/data-streams/data-stream-apis.asciidoc index d5a81a485af03..d525f0d8a7885 100644 --- a/docs/reference/data-streams/data-stream-apis.asciidoc +++ b/docs/reference/data-streams/data-stream-apis.asciidoc @@ -41,34 +41,34 @@ The following API is available for <>: For concepts and tutorials, see <>. 
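For the data stream APIs gathered in the hunk below, a hedged sketch of the create call; it assumes a matching index template with `data_stream` enabled already exists, and `my-data-stream` is a placeholder name:

[source,console]
----
PUT /_data_stream/my-data-stream
----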
-include::{es-repo-dir}/indices/create-data-stream.asciidoc[] +include::{es-ref-dir}/indices/create-data-stream.asciidoc[] -include::{es-repo-dir}/indices/delete-data-stream.asciidoc[] +include::{es-ref-dir}/indices/delete-data-stream.asciidoc[] -include::{es-repo-dir}/indices/get-data-stream.asciidoc[] +include::{es-ref-dir}/indices/get-data-stream.asciidoc[] -include::{es-repo-dir}/indices/migrate-to-data-stream.asciidoc[] +include::{es-ref-dir}/indices/migrate-to-data-stream.asciidoc[] -include::{es-repo-dir}/indices/data-stream-stats.asciidoc[] +include::{es-ref-dir}/indices/data-stream-stats.asciidoc[] -include::{es-repo-dir}/data-streams/promote-data-stream-api.asciidoc[] +include::{es-ref-dir}/data-streams/promote-data-stream-api.asciidoc[] -include::{es-repo-dir}/data-streams/modify-data-streams-api.asciidoc[] +include::{es-ref-dir}/data-streams/modify-data-streams-api.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/put-lifecycle.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/put-lifecycle.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/get-lifecycle.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/get-lifecycle.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/delete-lifecycle.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/delete-lifecycle.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/explain-lifecycle.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/explain-lifecycle.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/put-global-retention.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/put-global-retention.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/get-global-retention.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/get-global-retention.asciidoc[] -include::{es-repo-dir}/data-streams/lifecycle/apis/delete-global-retention.asciidoc[] +include::{es-ref-dir}/data-streams/lifecycle/apis/delete-global-retention.asciidoc[] -include::{es-repo-dir}/indices/downsample-data-stream.asciidoc[] +include::{es-ref-dir}/indices/downsample-data-stream.asciidoc[] diff --git a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc index fd481d7ca4815..0cf6ad395fb4d 100644 --- a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc @@ -37,7 +37,7 @@ To target all data streams use `*` or `_all`. [[delete-data-lifecycle-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc index a2609dcb78ecf..e0e2df217335f 100644 --- a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc @@ -40,7 +40,7 @@ execution. (Optional, Boolean) Includes default configurations related to the lifecycle of the target. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[data-streams-explain-lifecycle-example]] ==== {api-examples-title} diff --git a/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc index 03e485f3e7eb9..0997c2d84ece2 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc @@ -28,9 +28,9 @@ Gets the global retention configuration that is applied on data streams managed [[get-global-retention-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-global-retention-api-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index 1bda7d8959bee..83955417abd0b 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -39,7 +39,7 @@ To target all data streams use `*` or `_all`. [[get-data-lifecycle-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc index 53bd3c2b96f0b..e68dc24f11a57 100644 --- a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc @@ -36,7 +36,7 @@ To target all data streams use `*` or `_all`. [[put-data-lifecycle-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/data-streams/set-up-a-data-stream.asciidoc b/docs/reference/data-streams/set-up-a-data-stream.asciidoc index 144146b897ef0..57388a1199f57 100644 --- a/docs/reference/data-streams/set-up-a-data-stream.asciidoc +++ b/docs/reference/data-streams/set-up-a-data-stream.asciidoc @@ -261,7 +261,7 @@ PUT _data_stream/my-data-stream [[secure-data-stream]] === Secure the data stream -include::{es-repo-dir}/security/authorization/alias-privileges.asciidoc[tag=data-stream-security] +include::{es-ref-dir}/security/authorization/alias-privileges.asciidoc[tag=data-stream-security] For an example, see <>. diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 1a32e64cedb1f..02f7d7e941fe8 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -249,21 +249,21 @@ on. were executed for each `index` or `create`. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pipeline] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pipeline] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] `require_alias`:: (Optional, Boolean) If `true`, the request's actions must target an index alias. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_includes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_includes] `timeout`:: + @@ -280,7 +280,7 @@ timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[bulk-api-request-body]] ==== {api-request-body-title} @@ -293,15 +293,15 @@ Indexes the specified document if it does not already exist. The following line must contain the source data to be indexed. + -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-index-ds] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-index-ds] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-id] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-list-executed-pipelines] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-list-executed-pipelines] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-dynamic-templates] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-dynamic-templates] -- `delete`:: @@ -309,12 +309,12 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-dynamic-templates Removes the specified document from the index. + -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-index] `_id`:: (Required, string) The document ID. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias] -- `index`:: @@ -324,15 +324,15 @@ If the document exists, replaces the document and increments the version. The following line must contain the source data to be indexed. 
+
--
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-index]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-index]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-id]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-list-executed-pipelines]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-list-executed-pipelines]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-dynamic-templates]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-dynamic-templates]
--

`update`::
Performs a partial document update.
The following line must contain the partial document and update options.
+
--
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-index]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-index]

`_id`::
(Required, string) The document ID.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=bulk-require-alias]
--

`doc`::
diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc
index d671cb9b5a637..8cde1da91121a 100644
--- a/docs/reference/docs/delete-by-query.asciidoc
+++ b/docs/reference/docs/delete-by-query.asciidoc
@@ -175,76 +175,76 @@ this parameter or use `*` or `_all`.
[[docs-delete-by-query-api-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+
Defaults to `true`.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard]

`conflicts`::
(Optional, string) What to do if delete by query hits version conflicts:
`abort` or `proceed`. Defaults to `abort`.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df]

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
Defaults to `open`.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=max_docs] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=max_docs] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=request_cache] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=request_cache] `refresh`:: (Optional, Boolean) If `true`, {es} refreshes all shards involved in the delete by query after the request completes. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `scroll`:: (Optional, <>) Period to retain the <> for scrolling. See <>. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=scroll_size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=scroll_size] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search_type] `search_timeout`:: (Optional, <>) Explicit timeout for each search request. Defaults to no timeout. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=slices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=slices] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sort] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sort] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=stats] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=stats] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] `timeout`:: (Optional, <>) Period each deletion request <>. Defaults to `1m` (one minute). 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[docs-delete-by-query-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc index 1d8ff699271b2..452d7f7758bfa 100644 --- a/docs/reference/docs/delete.asciidoc +++ b/docs/reference/docs/delete.asciidoc @@ -152,24 +152,24 @@ DELETE /my-index-000001/_doc/1?timeout=5m [[docs-delete-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `timeout`:: (Optional, <>) Period to <>. Defaults to `1m` (one minute). -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=doc-version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=doc-version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version_type] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[docs-delete-api-example]] ==== {api-examples-title} diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index d0d0db332eeef..c71215fff8d73 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -162,27 +162,27 @@ deleted documents in the background as you continue to index more data. [[docs-get-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=realtime] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=realtime] `refresh`:: (Optional, Boolean) If `true`, the request refreshes the relevant shard before retrieving the document. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=stored_fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=stored_fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_includes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_includes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=doc-version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=doc-version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version_type] [[docs-get-api-response-body]] ==== {api-response-body-title} diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 4577e02024805..9d359fd7d7f02 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -86,9 +86,9 @@ format and omit this parameter. [[docs-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] [[docs-index-api-op_type]] `op_type`:: @@ -101,11 +101,11 @@ If document id is specified, it defaults to `index`. Otherwise, it defaults to ` NOTE: If the request targets a data stream, an `op_type` of `create` is required. See <>. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pipeline] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pipeline] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `timeout`:: + @@ -122,13 +122,13 @@ timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=doc-version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=doc-version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version_type] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=require-alias] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=require-alias] [[docs-index-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/docs/multi-get.asciidoc b/docs/reference/docs/multi-get.asciidoc index 065e224696114..7c3eafa9c79f3 100644 --- a/docs/reference/docs/multi-get.asciidoc +++ b/docs/reference/docs/multi-get.asciidoc @@ -64,23 +64,23 @@ or when a document in the `docs` array does not specify an index. 
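As an aside on the multi get docs being touched here, a minimal sketch of an `_mget` request with a `docs` array; the index name and document IDs are placeholders, not values from the patch:

[source,console]
----
GET /_mget
{
  "docs": [
    { "_index": "my-index-000001", "_id": "1" },
    { "_index": "my-index-000001", "_id": "2" }
  ]
}
----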
[[docs-multi-get-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=realtime] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=realtime] `refresh`:: (Optional, Boolean) If `true`, the request refreshes relevant shards before retrieving documents. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=stored_fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=stored_fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_includes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_includes] [[docs-multi-get-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/docs/multi-termvectors.asciidoc b/docs/reference/docs/multi-termvectors.asciidoc index 90b31238a5c6c..5a27e0b9b3a37 100644 --- a/docs/reference/docs/multi-termvectors.asciidoc +++ b/docs/reference/docs/multi-termvectors.asciidoc @@ -64,27 +64,27 @@ that can be included in the response. [[docs-multi-termvectors-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=field_statistics] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=field_statistics] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=offsets] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=offsets] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=payloads] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=payloads] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=positions] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=positions] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=realtime] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=realtime] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=term_statistics] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=term_statistics] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version_type] [discrete] [[docs-multi-termvectors-api-example]] diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 7c3cd8716dfe3..146b519b05e80 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -483,21 +483,21 @@ timeout before failing. 
The actual wait time could be longer, particularly when multiple waits occur. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] `wait_for_completion`:: (Optional, Boolean) If `true`, the request blocks until the operation is complete. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=require-alias] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=require-alias] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=scroll] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=scroll] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=slices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=slices] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=max_docs] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=max_docs] [[docs-reindex-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/docs/termvectors.asciidoc b/docs/reference/docs/termvectors.asciidoc index 8fa6392e08d5b..31dfba1ac2668 100644 --- a/docs/reference/docs/termvectors.asciidoc +++ b/docs/reference/docs/termvectors.asciidoc @@ -143,27 +143,27 @@ from is randomly selected. Use `routing` only to hit a particular shard. [[docs-termvectors-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=field_statistics] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=field_statistics] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=offsets] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=offsets] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=payloads] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=payloads] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=positions] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=positions] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=realtime] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=realtime] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=term_statistics] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=term_statistics] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version_type] [[docs-termvectors-api-example]] ==== {api-examples-title} diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 5ef9d288623ce..bc63fa4e33d01 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -167,70 +167,70 @@ this parameter or use `*` or `_all`. 
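For the update by query docs patched below, a hedged sketch of a typical request using the `conflicts` query parameter described in the hunk; the index, field, and value are placeholders:

[source,console]
----
POST /my-index-000001/_update_by_query?conflicts=proceed
{
  "query": {
    "term": {
      "user.id": "kimchy"
    }
  }
}
----

With `conflicts=proceed`, the request counts version conflicts rather than aborting on the first one.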
[[docs-update-by-query-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] `conflicts`:: (Optional, string) What to do if update by query hits version conflicts: `abort` or `proceed`. Defaults to `abort`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=max_docs] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=max_docs] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pipeline] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pipeline] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=request_cache] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=request_cache] `refresh`:: (Optional, Boolean) If `true`, {es} refreshes affected shards to make the operation visible to search. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=requests_per_second] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `scroll`:: (Optional, <>) Period to retain the <> for scrolling. See <>. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=scroll_size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=scroll_size] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search_type] `search_timeout`:: (Optional, <>) Explicit timeout for each search request. Defaults to no timeout. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=slices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=slices] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sort] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sort] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=stats] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=stats] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] `timeout`:: + @@ -246,9 +246,9 @@ timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=version] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=version] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[docs-update-by-query-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 989335eb702f4..ca6a7e489449b 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -53,22 +53,22 @@ automatically if it doesn't exist. For more information, see <>. [[docs-update-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] `lang`:: (Optional, string) The script language. Default: `painless`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=require-alias] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=require-alias] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] `retry_on_conflict`:: (Optional, integer) Specify how many times should the operation be retried when a conflict occurs. Default: 0. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `_source`:: (Optional, list) Set to `false` to disable source retrieval (default: `true`). @@ -94,7 +94,7 @@ timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[update-api-example]] ==== {api-examples-title} diff --git a/docs/reference/eql/eql-search-api.asciidoc b/docs/reference/eql/eql-search-api.asciidoc index d4ea3f3c7499d..d7f10f4627f6c 100644 --- a/docs/reference/eql/eql-search-api.asciidoc +++ b/docs/reference/eql/eql-search-api.asciidoc @@ -99,7 +99,7 @@ ignored. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -206,7 +206,7 @@ returned. + A greater `fetch_size` value often increases search speed but uses more memory. -include::{es-repo-dir}/search/search.asciidoc[tag=fields-param-def] +include::{es-ref-dir}/search/search.asciidoc[tag=fields-param-def] `filter`:: (Optional, <>) @@ -280,7 +280,7 @@ command]. 
NOTE: This parameter may change the set of returned hits. However, it does not change the sort order of hits in the response. -include::{es-repo-dir}/search/search.asciidoc[tag=runtime-mappings-def] +include::{es-ref-dir}/search/search.asciidoc[tag=runtime-mappings-def] [[eql-search-api-params-size]] `size`:: diff --git a/docs/reference/eql/eql.asciidoc b/docs/reference/eql/eql.asciidoc index 2ede5e0fc737e..8f3b5b893ea52 100644 --- a/docs/reference/eql/eql.asciidoc +++ b/docs/reference/eql/eql.asciidoc @@ -1013,7 +1013,7 @@ You can also use the `fields` parameter to retrieve and format specific fields in the response. This field is identical to the search API's <>. -include::{es-repo-dir}/search/search-your-data/retrieve-selected-fields.asciidoc[tag=fields-param-desc] +include::{es-ref-dir}/search/search-your-data/retrieve-selected-fields.asciidoc[tag=fields-param-desc] The following search request uses the `fields` parameter to retrieve values for the `event.type` field, all fields starting with `process.`, and the @@ -1039,7 +1039,7 @@ GET /my-data-stream/_eql/search?filter_path=-hits.events._source ---- // TEST[setup:sec_logs] -include::{es-repo-dir}/search/search-your-data/retrieve-selected-fields.asciidoc[tag=fields-param-callouts] +include::{es-ref-dir}/search/search-your-data/retrieve-selected-fields.asciidoc[tag=fields-param-callouts] The response includes values as a flat list in the `fields` section for each hit. diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index f35a62c49aca3..95278314b0253 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -13,16 +13,16 @@ With {esql}, you can execute a single query across multiple clusters. ==== Prerequisites -include::{es-repo-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-prereqs] +include::{es-ref-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-prereqs] -include::{es-repo-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-gateway-seed-nodes] +include::{es-ref-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-gateway-seed-nodes] -include::{es-repo-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-proxy-mode] +include::{es-ref-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-proxy-mode] [discrete] [[ccq-remote-cluster-setup]] ==== Remote cluster setup -include::{es-repo-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-remote-cluster-setup] +include::{es-ref-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-remote-cluster-setup] <1> Since `skip_unavailable` was not set on `cluster_three`, it uses the default of `false`. See the <> @@ -221,4 +221,4 @@ in the response, such as execution time, selected target indices, and shards. [[ccq-during-upgrade]] ==== Query across clusters during an upgrade -include::{es-repo-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-during-upgrade] +include::{es-ref-dir}/search/search-your-data/search-across-clusters.asciidoc[tag=ccs-during-upgrade] diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc index e465d7daae126..c48118d1c367a 100644 --- a/docs/reference/esql/esql-enrich-data.asciidoc +++ b/docs/reference/esql/esql-enrich-data.asciidoc @@ -91,7 +91,7 @@ your query. 
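To ground the `ENRICH` docs patched here, a speculative sketch of an {esql} query submitted through the `_query` endpoint; the index, policy, and field names are invented for illustration and assume an enrich policy has already been created and executed:

[source,console]
----
POST /_query
{
  "query": """
    FROM employees
    | ENRICH languages_policy ON language_code WITH language_name
    | LIMIT 5
  """
}
----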
[[esql-enrich-prereqs]] ==== Prerequisites -include::{es-repo-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] +include::{es-ref-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] [discrete] [[esql-create-enrich-source-index]] @@ -130,7 +130,7 @@ include::processing-commands/enrich.asciidoc[tag=examples] [[esql-update-enrich-data]] ==== Update an enrich index -include::{es-repo-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] +include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] [discrete] [[esql-update-enrich-policies]] diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 351a8efdc8ae9..0e23c0d97e61b 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -21,7 +21,7 @@ This getting started is also available as an https://github.com/elastic/elastics To follow along with the queries in this guide, you can either set up your own deployment, or use Elastic's public {esql} demo environment. -include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc[] +include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc[] [discrete] [[esql-getting-started-running-queries]] @@ -29,7 +29,7 @@ include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data. In {kib}, you can use Console or Discover to run {esql} queries: -include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc[] +include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-discover-console.asciidoc[] [discrete] [[esql-getting-started-first-query]] @@ -279,7 +279,7 @@ Before you can use `ENRICH`, you first need to <> and <> an <>. -include::{es-repo-dir}/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc[] +include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc[] After creating and executing a policy, you can use it with the `ENRICH` command: diff --git a/docs/reference/esql/esql-index-options.asciidoc b/docs/reference/esql/esql-index-options.asciidoc index ba2307f611d45..721461bd96719 100644 --- a/docs/reference/esql/esql-index-options.asciidoc +++ b/docs/reference/esql/esql-index-options.asciidoc @@ -24,7 +24,7 @@ values may differ. The currently supported options are: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. @@ -37,7 +37,7 @@ missing or closed index. + Defaults to `true`. 
-include::{es-repo-dir}/search/search.asciidoc[tag=search-preference] +include::{es-ref-dir}/search/search.asciidoc[tag=search-preference] *Examples* diff --git a/docs/reference/esql/index.asciidoc b/docs/reference/esql/index.asciidoc index 531336277ba6b..5cb02064dc794 100644 --- a/docs/reference/esql/index.asciidoc +++ b/docs/reference/esql/index.asciidoc @@ -1,7 +1,7 @@ [[esql]] = {esql} -:esql-tests: {xes-repo-dir}/../../plugin/esql/qa +:esql-tests: {elasticsearch-root}/x-pack/docs/{lang}/../../plugin/esql/qa :esql-specs: {esql-tests}/testFixtures/src/main/resources [partintro] diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 3e474953a72f9..2a5dbc2f0d031 100644 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -24,8 +24,8 @@ The simplest way to set up {es} is to create a managed deployment with {ess} on {ecloud}. If you prefer to manage your own test environment, install and run {es} using Docker. -include::{es-repo-dir}/tab-widgets/code.asciidoc[] -include::{es-repo-dir}/tab-widgets/quick-start-install-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/code.asciidoc[] +include::{es-ref-dir}/tab-widgets/quick-start-install-widget.asciidoc[] [discrete] [[send-requests-to-elasticsearch]] @@ -36,7 +36,7 @@ with {es} using any client that sends HTTP requests, such as https://curl.se[curl]. You can also use {kib}'s Console to send requests to {es}. -include::{es-repo-dir}/tab-widgets/api-call-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/api-call-widget.asciidoc[] [discrete] [[add-data]] diff --git a/docs/reference/how-to/disk-usage.asciidoc b/docs/reference/how-to/disk-usage.asciidoc index 55a7b33f53cb3..975067fd576b6 100644 --- a/docs/reference/how-to/disk-usage.asciidoc +++ b/docs/reference/how-to/disk-usage.asciidoc @@ -94,7 +94,7 @@ Indices in Elasticsearch are stored in one or more shards. Each shard is a Lucen The <> can be used to reduce the number of segments per shard. In many cases, the number of segments can be reduced to one per shard by setting `max_num_segments=1`. -include::{es-repo-dir}/indices/forcemerge.asciidoc[tag=force-merge-read-only-warn] +include::{es-ref-dir}/indices/forcemerge.asciidoc[tag=force-merge-read-only-warn] [discrete] === Shrink index diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc index 924207bca4705..2bff5f82bf736 100644 --- a/docs/reference/how-to/indexing-speed.asciidoc +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -43,7 +43,7 @@ The operation that consists of making changes visible to search - called a <> - is costly, and calling it often while there is ongoing indexing activity can hurt indexing speed. -include::{es-repo-dir}/indices/refresh.asciidoc[tag=refresh-interval-default] +include::{es-ref-dir}/indices/refresh.asciidoc[tag=refresh-interval-default] This is the optimal configuration if you have no or very little search traffic (e.g. less than one search request every 5 minutes) and want to optimize for indexing speed. This behavior aims to automatically optimize bulk indexing in diff --git a/docs/reference/how-to/knn-search.asciidoc b/docs/reference/how-to/knn-search.asciidoc index bfe99ad615c47..194d122cef159 100644 --- a/docs/reference/how-to/knn-search.asciidoc +++ b/docs/reference/how-to/knn-search.asciidoc @@ -109,7 +109,7 @@ force merge to one segment, the kNN search only need to check a single, all-inclusive HNSW graph. 
Force merging `dense_vector` fields is an expensive operation that can take significant time to complete. -include::{es-repo-dir}/indices/forcemerge.asciidoc[tag=force-merge-read-only-warn] +include::{es-ref-dir}/indices/forcemerge.asciidoc[tag=force-merge-read-only-warn] [discrete] ==== Create large segments during bulk indexing diff --git a/docs/reference/how-to/use-elasticsearch-for-time-series-data.asciidoc b/docs/reference/how-to/use-elasticsearch-for-time-series-data.asciidoc index 18de2497760c8..21a516aeb53f6 100644 --- a/docs/reference/how-to/use-elasticsearch-for-time-series-data.asciidoc +++ b/docs/reference/how-to/use-elasticsearch-for-time-series-data.asciidoc @@ -26,7 +26,7 @@ stream. The steps for setting up data tiers vary based on your deployment type: -include::{es-repo-dir}/tab-widgets/data-tiers-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/data-tiers-widget.asciidoc[] [discrete] [[register-snapshot-repository]] @@ -39,7 +39,7 @@ To use {search-snaps}, you must register a supported snapshot repository. The steps for registering this repository vary based on your deployment type and storage provider: -include::{es-repo-dir}/tab-widgets/snapshot-repo-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/snapshot-repo-widget.asciidoc[] [discrete] [[create-edit-index-lifecycle-policy]] @@ -58,7 +58,7 @@ ensure your policy: * Uses {search-snaps} in the cold and frozen phases, if wanted. * Includes a delete phase, if needed. -include::{es-repo-dir}/tab-widgets/ilm-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/ilm-widget.asciidoc[] [discrete] [[create-ts-component-templates]] @@ -68,19 +68,19 @@ TIP: If you use {fleet} or {agent}, skip to <>. {fleet} and {agent} use built-in templates to create data streams for you. If you use a custom application, you need to set up your own data stream. -include::{es-repo-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-component-templates] +include::{es-ref-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-component-templates] [discrete] [[create-ts-index-template]] === Create an index template -include::{es-repo-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-index-template] +include::{es-ref-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-index-template] [discrete] [[add-data-to-data-stream]] === Add data to a data stream -include::{es-repo-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-data-stream] +include::{es-ref-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ds-create-data-stream] [discrete] [[search-visualize-your-data]] diff --git a/docs/reference/ilm/apis/delete-lifecycle.asciidoc b/docs/reference/ilm/apis/delete-lifecycle.asciidoc index b8e4c7a0bd217..632cb982b3968 100644 --- a/docs/reference/ilm/apis/delete-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/delete-lifecycle.asciidoc @@ -35,7 +35,7 @@ the request fails and returns an error. [[ilm-delete-lifecycle-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-delete-lifecycle-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/explain.asciidoc b/docs/reference/ilm/apis/explain.asciidoc index 64e9ec9d5241b..fbe017619048f 100644 --- a/docs/reference/ilm/apis/explain.asciidoc +++ b/docs/reference/ilm/apis/explain.asciidoc @@ -49,7 +49,7 @@ or `_all`. 
{ilm-init} and are in an error state, either due to encountering an error while executing the policy, or attempting to use a policy that does not exist. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-explain-lifecycle-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/get-lifecycle.asciidoc b/docs/reference/ilm/apis/get-lifecycle.asciidoc index f736de20ed437..7443610065487 100644 --- a/docs/reference/ilm/apis/get-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/get-lifecycle.asciidoc @@ -36,7 +36,7 @@ modified date. If no policy is specified, returns all defined policies. [[ilm-get-lifecycle-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-get-lifecycle-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/get-status.asciidoc b/docs/reference/ilm/apis/get-status.asciidoc index 384f6e9e55ef0..7e9e963f6f369 100644 --- a/docs/reference/ilm/apis/get-status.asciidoc +++ b/docs/reference/ilm/apis/get-status.asciidoc @@ -27,7 +27,7 @@ You can start or stop {ilm-init} with the <> and [[ilm-get-status-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[ilm-get-status-response-body]] diff --git a/docs/reference/ilm/apis/move-to-step.asciidoc b/docs/reference/ilm/apis/move-to-step.asciidoc index f901b24024406..19cc9f7088867 100644 --- a/docs/reference/ilm/apis/move-to-step.asciidoc +++ b/docs/reference/ilm/apis/move-to-step.asciidoc @@ -50,7 +50,7 @@ policy are considered valid, an index cannot move to a step that is not part of [[ilm-move-to-step-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[ilm-move-to-step-request-body]] diff --git a/docs/reference/ilm/apis/put-lifecycle.asciidoc b/docs/reference/ilm/apis/put-lifecycle.asciidoc index 6d7c8ea5e297f..ffd59a14d8c25 100644 --- a/docs/reference/ilm/apis/put-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/put-lifecycle.asciidoc @@ -46,7 +46,7 @@ To avoid naming collisions with built-in and Fleet-managed ILM policies, avoid u [[ilm-put-lifecycle-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-put-lifecycle-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc index 0bc5010936970..20e0df9f3cb92 100644 --- a/docs/reference/ilm/apis/remove-policy-from-index.asciidoc +++ b/docs/reference/ilm/apis/remove-policy-from-index.asciidoc @@ -40,7 +40,7 @@ target. Supports wildcards (`*`).
To target all data streams and indices, use [[ilm-remove-policy-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-remove-policy-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/retry-policy.asciidoc b/docs/reference/ilm/apis/retry-policy.asciidoc index c1c78bdb88246..cb2587fbb151b 100644 --- a/docs/reference/ilm/apis/retry-policy.asciidoc +++ b/docs/reference/ilm/apis/retry-policy.asciidoc @@ -35,7 +35,7 @@ step. [[ilm-retry-policy-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-retry-policy-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/start.asciidoc b/docs/reference/ilm/apis/start.asciidoc index 1b5a4b9b2561d..32db585c6b14c 100644 --- a/docs/reference/ilm/apis/start.asciidoc +++ b/docs/reference/ilm/apis/start.asciidoc @@ -31,7 +31,7 @@ necessary if it has been stopped using the <>. [[ilm-start-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-start-example]] ==== {api-examples-title} diff --git a/docs/reference/ilm/apis/stop.asciidoc b/docs/reference/ilm/apis/stop.asciidoc index 0fa7875b0efdd..1e9cfb94d0b1f 100644 --- a/docs/reference/ilm/apis/stop.asciidoc +++ b/docs/reference/ilm/apis/stop.asciidoc @@ -36,7 +36,7 @@ if {ilm-init} is running. [[ilm-stop-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[ilm-stop-example]] ==== {api-examples-title} diff --git a/docs/reference/index-modules/blocks.asciidoc b/docs/reference/index-modules/blocks.asciidoc index 2d89676a8af2b..5fc89dedf0247 100644 --- a/docs/reference/index-modules/blocks.asciidoc +++ b/docs/reference/index-modules/blocks.asciidoc @@ -86,7 +86,7 @@ PUT /my-index-000001/_block/write [[add-index-block-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index] + By default, you must explicitly name the indices you are adding blocks to. To allow the adding of blocks to indices with `_all`, `*`, or other wildcard @@ -116,17 +116,17 @@ Disable write operations. However, metadata changes are still allowed. [[add-index-block-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [discrete] [[add-index-block-api-example]] diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index b09d67e990636..e47304f1e1337 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -1,13 +1,6 @@ [[elasticsearch-reference]] = Elasticsearch Guide -:include-xpack: true -:es-test-dir: {elasticsearch-root}/docs/src/test -:plugins-examples-dir: {elasticsearch-root}/plugins/examples -:dependencies-dir: {elasticsearch-root}/build-tools-internal -:xes-repo-dir: {elasticsearch-root}/x-pack/docs/{lang} -:es-repo-dir: {elasticsearch-root}/docs/reference - include::../Versions.asciidoc[] include::links.asciidoc[] diff --git a/docs/reference/indices/add-alias.asciidoc b/docs/reference/indices/add-alias.asciidoc index 860e9ca46f795..e14af6a64a2ec 100644 --- a/docs/reference/indices/add-alias.asciidoc +++ b/docs/reference/indices/add-alias.asciidoc @@ -46,7 +46,7 @@ indices return an error. [[add-alias-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[add-alias-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/indices/alias-exists.asciidoc b/docs/reference/indices/alias-exists.asciidoc index 997dbad5bd6c8..f820a95028a0f 100644 --- a/docs/reference/indices/alias-exists.asciidoc +++ b/docs/reference/indices/alias-exists.asciidoc @@ -44,7 +44,7 @@ omit this parameter or use `*` or `_all`. [[alias-exists-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `all`. @@ -52,7 +52,7 @@ Defaults to `all`. (Optional, Boolean) If `false`, requests that include a missing data stream or index in the `` return an error. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] [[alias-exists-api-response-codes]] ==== {api-response-codes-title} diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 34248cc5f98d3..1df9e0a4883b8 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -43,7 +43,7 @@ for the index. [[indices-aliases-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[indices-aliases-api-request-body]] diff --git a/docs/reference/indices/apis/reload-analyzers.asciidoc b/docs/reference/indices/apis/reload-analyzers.asciidoc index dc6ed7d916a1f..ca5f540564f8e 100644 --- a/docs/reference/indices/apis/reload-analyzers.asciidoc +++ b/docs/reference/indices/apis/reload-analyzers.asciidoc @@ -85,15 +85,15 @@ and indices, use `*` or `_all`. 
[[indices-reload-analyzers-api-query-params]] === {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] [discrete] diff --git a/docs/reference/indices/clearcache.asciidoc b/docs/reference/indices/clearcache.asciidoc index 10b590fce0976..a3150ec6f72ee 100644 --- a/docs/reference/indices/clearcache.asciidoc +++ b/docs/reference/indices/clearcache.asciidoc @@ -39,11 +39,11 @@ and indices, omit this parameter or use `*` or `_all`. [[clear-cache-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -77,7 +77,7 @@ or field aliases. Comma-separated list of index names used to limit the request. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `query`:: (Optional, Boolean) diff --git a/docs/reference/indices/clone-index.asciidoc b/docs/reference/indices/clone-index.asciidoc index ef8ed28c6ac05..c8e5d2e200f2e 100644 --- a/docs/reference/indices/clone-index.asciidoc +++ b/docs/reference/indices/clone-index.asciidoc @@ -168,15 +168,15 @@ on index creation applies to the clone index action as well. (Required, string) Name of the source index to clone. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index] [[clone-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[clone-index-api-request-body]] @@ -185,6 +185,6 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `aliases`:: (Optional, object of objects) Aliases for the resulting index. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] diff --git a/docs/reference/indices/close.asciidoc b/docs/reference/indices/close.asciidoc index 5caa6e8922892..a4bf1742fdea8 100644 --- a/docs/reference/indices/close.asciidoc +++ b/docs/reference/indices/close.asciidoc @@ -29,13 +29,13 @@ POST /my-index-000001/_close You use the close index API to close open indices. 
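For illustration, a minimal close-then-reopen round trip of the kind this page documents might look like the following sketch; the index name is a hypothetical example:

[source,console]
----
# hypothetical index name
POST /my-index-000001/_close

# a closed index can be made available again with the open index API
POST /my-index-000001/_open
----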
-include::{es-repo-dir}/indices/open-close.asciidoc[tag=closed-index] +include::{es-ref-dir}/indices/open-close.asciidoc[tag=closed-index] [[close-index-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index] + To close all indices, use `_all` or `*`. By default, you must explicitly name the indices you are closing. @@ -48,19 +48,19 @@ or using the <> API. [[close-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[close-index-api-example]] diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index d39591a37df5f..2e66f3d6030cb 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -60,9 +60,9 @@ Index names must meet the following criteria: [[indices-create-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[indices-create-api-request-body]] @@ -113,9 +113,9 @@ specified, this overwrites the `routing` value for search operations. // end::aliases-props[] -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=mappings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=mappings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=settings] [[indices-create-api-example]] ==== {api-examples-title} diff --git a/docs/reference/indices/dangling-index-delete.asciidoc b/docs/reference/indices/dangling-index-delete.asciidoc index 53be30ef76287..6af35031e9e61 100644 --- a/docs/reference/indices/dangling-index-delete.asciidoc +++ b/docs/reference/indices/dangling-index-delete.asciidoc @@ -24,7 +24,7 @@ DELETE /_dangling/?accept_data_loss=true [[dangling-index-delete-api-desc]] ==== {api-description-title} -include::{es-repo-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] +include::{es-ref-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] Deletes a dangling index by referencing its UUID. Use the @@ -47,4 +47,4 @@ UUID of the index to delete. You can find this using the This field must be set to `true` in order to carry out the import, since it will no longer be possible to recover the data from the dangling index. 
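For illustration, assuming the UUID has already been looked up with the list dangling indices API (`GET /_dangling`), a delete request might look like this sketch:

[source,console]
----
# <index-uuid> is a placeholder for a UUID returned by GET /_dangling
DELETE /_dangling/<index-uuid>?accept_data_loss=true
----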
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/dangling-index-import.asciidoc b/docs/reference/indices/dangling-index-import.asciidoc index c022c51d40912..44cde56de8c95 100644 --- a/docs/reference/indices/dangling-index-import.asciidoc +++ b/docs/reference/indices/dangling-index-import.asciidoc @@ -24,7 +24,7 @@ POST /_dangling/?accept_data_loss=true [[dangling-index-import-api-desc]] ==== {api-description-title} -include::{es-repo-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] +include::{es-ref-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] Import a single index into the cluster by referencing its UUID. Use the <> to locate the UUID of an index. @@ -48,7 +48,7 @@ cannot know where the dangling index data came from or determine which shard copies are fresh and which are stale, it cannot guarantee that the imported data represents the latest state of the index when it was last in the cluster. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[dangling-index-import-api-example]] ==== {api-examples-title} diff --git a/docs/reference/indices/data-stream-stats.asciidoc b/docs/reference/indices/data-stream-stats.asciidoc index a1f83701bb521..3ed285abc035a 100644 --- a/docs/reference/indices/data-stream-stats.asciidoc +++ b/docs/reference/indices/data-stream-stats.asciidoc @@ -78,7 +78,7 @@ To target all data streams in a cluster, omit this parameter or use `*`. [[data-stream-stats-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/indices/delete-alias.asciidoc b/docs/reference/indices/delete-alias.asciidoc index 680e255a9728a..748862df06100 100644 --- a/docs/reference/indices/delete-alias.asciidoc +++ b/docs/reference/indices/delete-alias.asciidoc @@ -42,4 +42,4 @@ the request. Supports wildcards (`*`). [[delete-alias-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/delete-component-template.asciidoc b/docs/reference/indices/delete-component-template.asciidoc index 21693c44aadc6..0ca6560f17ccb 100644 --- a/docs/reference/indices/delete-component-template.asciidoc +++ b/docs/reference/indices/delete-component-template.asciidoc @@ -52,10 +52,10 @@ that specify index mappings, settings, and aliases. 
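For illustration, deleting a single component template is a one-line request; `template_1` is a hypothetical template name:

[source,console]
----
# hypothetical component template name
DELETE /_component_template/template_1
----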
[[delete-component-template-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=component-template] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=component-template] [[delete-component-template-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/delete-data-stream.asciidoc b/docs/reference/indices/delete-data-stream.asciidoc index eb970f0f29d40..38e7a00d451d8 100644 --- a/docs/reference/indices/delete-data-stream.asciidoc +++ b/docs/reference/indices/delete-data-stream.asciidoc @@ -59,6 +59,6 @@ Wildcard (`*`) expressions are supported. [[delete-data-stream-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/indices/delete-index-template-v1.asciidoc b/docs/reference/indices/delete-index-template-v1.asciidoc index 9caf6935fe4f2..ca0b5a0e726bd 100644 --- a/docs/reference/indices/delete-index-template-v1.asciidoc +++ b/docs/reference/indices/delete-index-template-v1.asciidoc @@ -55,4 +55,4 @@ expressions are supported. [[delete-template-api-v1-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/delete-index-template.asciidoc b/docs/reference/indices/delete-index-template.asciidoc index f410f091f6864..02396310daff4 100644 --- a/docs/reference/indices/delete-index-template.asciidoc +++ b/docs/reference/indices/delete-index-template.asciidoc @@ -55,10 +55,10 @@ and <> that can be applied automatically to new indices. [[delete-template-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-template] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-template] [[delete-template-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/delete-index.asciidoc b/docs/reference/indices/delete-index.asciidoc index 92f403c22ae3a..d5d168154e44a 100644 --- a/docs/reference/indices/delete-index.asciidoc +++ b/docs/reference/indices/delete-index.asciidoc @@ -50,14 +50,14 @@ setting to `false`. [[delete-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open,closed`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/indices/diskusage.asciidoc b/docs/reference/indices/diskusage.asciidoc index 98c2c799cf05d..3510ba346e5a7 100644 --- a/docs/reference/indices/diskusage.asciidoc +++ b/docs/reference/indices/diskusage.asciidoc @@ -41,11 +41,11 @@ resources significantly. [[analyze-index-disk-usage-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -53,13 +53,13 @@ Defaults to `open`. (Optional, Boolean) If `true`, the API performs a flush before analysis. If `false`, the response may not include uncommitted data. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `run_expensive_tasks`:: (Required, Boolean) Analyzing field disk usage is resource-intensive. To use the API, this parameter must be set to `true`. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[analyze-index-disk-usage-api-example]] diff --git a/docs/reference/indices/downsample-data-stream.asciidoc b/docs/reference/indices/downsample-data-stream.asciidoc index 8226c365dd50b..5ace4e03dfb66 100644 --- a/docs/reference/indices/downsample-data-stream.asciidoc +++ b/docs/reference/indices/downsample-data-stream.asciidoc @@ -103,7 +103,7 @@ or `manage` <> for the data stream. -- (Required, string) Name of the index to create. 
-include::{es-repo-dir}/indices/create-index.asciidoc[tag=index-name-reqs] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=index-name-reqs] -- [role="child_attributes"] diff --git a/docs/reference/indices/field-usage-stats.asciidoc b/docs/reference/indices/field-usage-stats.asciidoc index 9ff3143cc3893..9fd1d9e59eb33 100644 --- a/docs/reference/indices/field-usage-stats.asciidoc +++ b/docs/reference/indices/field-usage-stats.asciidoc @@ -33,20 +33,20 @@ GET /my-index-000001/_field_usage_stats [[field-usage-stats-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index] [[field-usage-stats-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `fields`:: + diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index 25d39a17af306..61c44f157da95 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -66,11 +66,11 @@ this parameter or use `*` or `_all`. [[flush-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -90,7 +90,7 @@ This parameter is considered internal. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `wait_if_ongoing`:: + diff --git a/docs/reference/indices/forcemerge.asciidoc b/docs/reference/indices/forcemerge.asciidoc index e316e0c5e1ae8..1d473acbd5d48 100644 --- a/docs/reference/indices/forcemerge.asciidoc +++ b/docs/reference/indices/forcemerge.asciidoc @@ -103,11 +103,11 @@ and indices, omit this parameter or use `*` or `_all`. [[forcemerge-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -118,7 +118,7 @@ If `true`, after the force merge. Defaults to `true`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `max_num_segments`:: + diff --git a/docs/reference/indices/get-alias.asciidoc b/docs/reference/indices/get-alias.asciidoc index 7c2f414ce5f40..743aaf7aee174 100644 --- a/docs/reference/indices/get-alias.asciidoc +++ b/docs/reference/indices/get-alias.asciidoc @@ -47,11 +47,11 @@ omit this parameter or use `*` or `_all`. [[get-alias-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `all`. @@ -59,4 +59,4 @@ Defaults to `all`. (Optional, Boolean) If `false`, requests that include a missing data stream or index in the `` return an error. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index d35e7a3d5e2ee..240a33164b379 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -97,7 +97,7 @@ returned. [[get-data-stream-api-query-parms]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] + Defaults to `open`. diff --git a/docs/reference/indices/get-field-mapping.asciidoc b/docs/reference/indices/get-field-mapping.asciidoc index f72210b58e8fd..ac5895872fbb5 100644 --- a/docs/reference/indices/get-field-mapping.asciidoc +++ b/docs/reference/indices/get-field-mapping.asciidoc @@ -47,13 +47,13 @@ limit returned information. [[get-field-mapping-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `include_defaults`:: (Optional, Boolean) If `true`, the response includes default mapping values. diff --git a/docs/reference/indices/get-index-template-v1.asciidoc b/docs/reference/indices/get-index-template-v1.asciidoc index aa29786033d11..602ca2fe454ad 100644 --- a/docs/reference/indices/get-index-template-v1.asciidoc +++ b/docs/reference/indices/get-index-template-v1.asciidoc @@ -52,7 +52,7 @@ privilege>> to use this API. [[get-template-v1-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-template] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-template] + To return all index templates, omit this parameter or use a value of `_all` or `*`. @@ -61,11 +61,11 @@ or use a value of `_all` or `*`. 
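For illustration, a minimal sketch of the legacy get template API, first for one named template and then for all templates via a wildcard; `template_1` is a hypothetical name:

[source,console]
----
# hypothetical legacy template name
GET /_template/template_1

# wildcard form, equivalent to omitting the name entirely
GET /_template/*
----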
[[get-template-v1-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-template-v1-api-example]] diff --git a/docs/reference/indices/get-index.asciidoc b/docs/reference/indices/get-index.asciidoc index 3e08cfc02c28a..2551d25801d70 100644 --- a/docs/reference/indices/get-index.asciidoc +++ b/docs/reference/indices/get-index.asciidoc @@ -36,11 +36,11 @@ and indices, omit this parameter or use `*` or `_all`. [[get-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -50,15 +50,15 @@ Return information about specific index features. Supports comma- separated values. Valid values are `aliases`, `mappings`, and `settings`. Defaults to `aliases,mappings,settings`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] `ignore_unavailable`:: (Optional, Boolean) If `false`, requests that target a missing index return an error. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] diff --git a/docs/reference/indices/get-mapping.asciidoc b/docs/reference/indices/get-mapping.asciidoc index 90368126b57bd..16dc8c66d0715 100644 --- a/docs/reference/indices/get-mapping.asciidoc +++ b/docs/reference/indices/get-mapping.asciidoc @@ -38,19 +38,19 @@ and indices, omit this parameter or use `*` or `_all`. [[get-mapping-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-mapping-api-example]] diff --git a/docs/reference/indices/get-settings.asciidoc b/docs/reference/indices/get-settings.asciidoc index f7ba885b55cb9..b6cb6d2926387 100644 --- a/docs/reference/indices/get-settings.asciidoc +++ b/docs/reference/indices/get-settings.asciidoc @@ -44,23 +44,23 @@ used to limit the request. [[get-index-settings-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-index-settings-api-example]] diff --git a/docs/reference/indices/index-template-exists-v1.asciidoc b/docs/reference/indices/index-template-exists-v1.asciidoc index 888a967056eeb..2358f0b1a376e 100644 --- a/docs/reference/indices/index-template-exists-v1.asciidoc +++ b/docs/reference/indices/index-template-exists-v1.asciidoc @@ -43,17 +43,17 @@ and <> that can be applied automatically to new indices. [[template-exists-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-template] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-template] [[template-exists-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[template-exists-api-response-codes]] diff --git a/docs/reference/indices/indices-exists.asciidoc b/docs/reference/indices/indices-exists.asciidoc index 934b224b8e796..d699d36add03b 100644 --- a/docs/reference/indices/indices-exists.asciidoc +++ b/docs/reference/indices/indices-exists.asciidoc @@ -35,21 +35,21 @@ Supports wildcards (`*`). 
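For illustration, an existence check is a `HEAD` request against the target and returns only a status code; the index name here is hypothetical:

[source,console]
----
# hypothetical index name; 200 means the target exists, 404 means it does not
HEAD /my-index-000001
----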
[[indices-exists-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-defaults] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] [[indices-exists-api-response-codes]] ==== {api-response-codes-title} diff --git a/docs/reference/indices/open-close.asciidoc b/docs/reference/indices/open-close.asciidoc index bac5c07a01954..a077c4d19fd56 100644 --- a/docs/reference/indices/open-close.asciidoc +++ b/docs/reference/indices/open-close.asciidoc @@ -96,19 +96,19 @@ or using the <> API. [[open-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `closed`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[open-index-api-example]] diff --git a/docs/reference/indices/put-component-template.asciidoc b/docs/reference/indices/put-component-template.asciidoc index faf7e67039de7..0a0e36b63e6cd 100644 --- a/docs/reference/indices/put-component-template.asciidoc +++ b/docs/reference/indices/put-component-template.asciidoc @@ -123,7 +123,7 @@ The exception of that rule are the `*@custom` component templates that let you s If `true`, this request cannot replace or update existing component templates. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[put-component-template-api-request-body]] @@ -140,13 +140,13 @@ This is the template to be applied, may optionally include a `mappings`, `aliases`:: (Optional, object of objects) Aliases to add. 
+ -include::{es-repo-dir}/indices/put-index-template.asciidoc[tag=template-ds-alias] +include::{es-ref-dir}/indices/put-index-template.asciidoc[tag=template-ds-alias] + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=mappings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=mappings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=settings] ==== `version`:: diff --git a/docs/reference/indices/put-index-template-v1.asciidoc b/docs/reference/indices/put-index-template-v1.asciidoc index 5b7713656c4d3..86a8a54edd97f 100644 --- a/docs/reference/indices/put-index-template-v1.asciidoc +++ b/docs/reference/indices/put-index-template-v1.asciidoc @@ -112,7 +112,7 @@ Templates with lower `order` values are merged first. Templates with higher `order` values are merged later, overriding templates with lower values. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[put-index-template-v1-api-request-body]] @@ -126,11 +126,11 @@ used to match the names of indices during creation. `aliases`:: (Optional, object of objects) Aliases for the index. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=mappings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=mappings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=settings] `version`:: (Optional, integer) diff --git a/docs/reference/indices/put-index-template.asciidoc b/docs/reference/indices/put-index-template.asciidoc index b9460bda86a09..bcc7fa9caa812 100644 --- a/docs/reference/indices/put-index-template.asciidoc +++ b/docs/reference/indices/put-index-template.asciidoc @@ -156,7 +156,7 @@ aliases. Otherwise, these are index aliases. Data stream aliases ignore the `index_routing`, `routing`, and `search_routing` options. // end::template-ds-alias[] + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] include::{docdir}/rest-api/common-parms.asciidoc[tag=mappings] diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index bd2382c38910f..dc6dbff1df42c 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -50,17 +50,17 @@ and indices, omit this parameter or use `*` or `_all`. [[put-mapping-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `write_index_only`:: (Optional, Boolean) diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc index 81b3aa13580c0..b4e4bd33f819a 100644 --- a/docs/reference/indices/recovery.asciidoc +++ b/docs/reference/indices/recovery.asciidoc @@ -70,11 +70,11 @@ and indices, omit this parameter or use `*` or `_all`. [[index-recovery-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=active-only] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=active-only] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=detailed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=detailed] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-query-parm] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-query-parm] [[index-recovery-api-response-body]] diff --git a/docs/reference/indices/refresh.asciidoc b/docs/reference/indices/refresh.asciidoc index f0f9d06d6fc44..bd8e821ff56b9 100644 --- a/docs/reference/indices/refresh.asciidoc +++ b/docs/reference/indices/refresh.asciidoc @@ -81,15 +81,15 @@ omit this parameter or use `*` or `_all`. [[refresh-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] [[refresh-api-example]] diff --git a/docs/reference/indices/resolve-cluster.asciidoc b/docs/reference/indices/resolve-cluster.asciidoc index 8fa53bfc27056..48e6bfac4af10 100644 --- a/docs/reference/indices/resolve-cluster.asciidoc +++ b/docs/reference/indices/resolve-cluster.asciidoc @@ -91,15 +91,15 @@ Resources on <> can be specified using the [[resolve-cluster-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] + Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. diff --git a/docs/reference/indices/resolve.asciidoc b/docs/reference/indices/resolve.asciidoc index c919bba5c7651..856546b037fea 100644 --- a/docs/reference/indices/resolve.asciidoc +++ b/docs/reference/indices/resolve.asciidoc @@ -76,15 +76,15 @@ Resources on <> can be specified using the [[resolve-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] + Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index aa6f978b237d7..2a47d28e5358d 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -46,7 +46,7 @@ rollover also increments the data stream's generation. See [TIP] ==== -include::{es-repo-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=time-series-alias-tip] +include::{es-ref-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=time-series-alias-tip] See <>. ==== @@ -104,7 +104,7 @@ streams do not support this parameter. If the name of the alias's current write index does not end with `-` and a number, such as `my-index-000001` or `my-index-3`, this parameter is required. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=index-name-reqs] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=index-name-reqs] [[rollover-index-api-query-params]] ==== {api-query-parms-title} @@ -119,9 +119,9 @@ If `true`, checks whether the current index satisfies the specified If `true`, signals that the data stream will be rolled over when the next indexing operation occurs. Applies only to data streams. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[rollover-index-api-request-body]] @@ -131,7 +131,7 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] (Optional, object of objects) Aliases for the target index. Data streams do not support this parameter. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] [[rollover-conditions]] `conditions`:: @@ -152,14 +152,14 @@ instead. .Properties of `conditions` [%collapsible%open] ==== -include::{es-repo-dir}/ilm/actions/ilm-rollover.asciidoc[tag=rollover-conditions] +include::{es-ref-dir}/ilm/actions/ilm-rollover.asciidoc[tag=rollover-conditions] ==== -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=mappings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=mappings] + Data streams do not support this parameter. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=settings] + Data streams do not support this parameter. diff --git a/docs/reference/indices/segments.asciidoc b/docs/reference/indices/segments.asciidoc index db13243c62d27..8621bc7ae1de9 100644 --- a/docs/reference/indices/segments.asciidoc +++ b/docs/reference/indices/segments.asciidoc @@ -40,50 +40,50 @@ and indices, omit this parameter or use `*` or `_all`. [[index-segments-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
Defaults to `open`.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]


[[index-segments-api-response-body]]
==== {api-response-body-title}

`<segment>`::
(String)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment]

`generation`::
(Integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=generation]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=generation]

`num_docs`::
(Integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-count]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-count]

`deleted_docs`::
(Integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted]

`size_in_bytes`::
(Integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-size]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-size]

`committed`::
(Boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=committed]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=committed]

`search`::
(Boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-search]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-search]

`version`::
(String)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=segment-version]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=segment-version]

`compound`::
(Boolean)
diff --git a/docs/reference/indices/shard-stores.asciidoc b/docs/reference/indices/shard-stores.asciidoc
index 562c146949718..1b001a3175b8c 100644
--- a/docs/reference/indices/shard-stores.asciidoc
+++ b/docs/reference/indices/shard-stores.asciidoc
@@ -58,15 +58,15 @@ and indices, omit this parameter or use `*` or `_all`.

[[index-shard-stores-api-query-params]]
==== {api-query-parms-title}

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+
Defaults to `true`.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
Defaults to `open`.

-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]

`status`::
+
diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc
index 5d5e6c24d9e83..244733282d46e 100644
--- a/docs/reference/indices/shrink-index.asciidoc
+++ b/docs/reference/indices/shrink-index.asciidoc
@@ -219,14 +219,14 @@ on index creation applies to the shrink index action as well.

(Required, string)
Name of the source index to shrink.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index] [[shrink-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[shrink-index-api-request-body]] @@ -235,9 +235,9 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `aliases`:: (Optional, object of objects) Aliases for the resulting index. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] `max_primary_shard_size`:: (Optional, <>) diff --git a/docs/reference/indices/simulate-template.asciidoc b/docs/reference/indices/simulate-template.asciidoc index 404aa70d72e70..c7397ace97886 100644 --- a/docs/reference/indices/simulate-template.asciidoc +++ b/docs/reference/indices/simulate-template.asciidoc @@ -132,7 +132,7 @@ The settings, mappings, and aliases that would be applied to matching indices. (Optional, object of objects) Aliases for the index. If the index template includes `data_stream`, this parameter is not supported. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=mappings] diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index 26ae0f19b177c..0c93b572639db 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -255,15 +255,15 @@ on index creation applies to the split index action as well. (Required, string) Name of the source index to split. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index] [[split-index-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[split-index-api-request-body]] @@ -272,6 +272,6 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] `aliases`:: (Optional, object of objects) Aliases for the resulting index. + -include::{es-repo-dir}/indices/create-index.asciidoc[tag=aliases-props] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=target-index-settings] diff --git a/docs/reference/indices/stats.asciidoc b/docs/reference/indices/stats.asciidoc index 41b213b7a5b22..088d65c37ec6e 100644 --- a/docs/reference/indices/stats.asciidoc +++ b/docs/reference/indices/stats.asciidoc @@ -64,34 +64,34 @@ to which the shard contributed. used to limit the request. 
Supports wildcards (`*`). To target all data streams and indices, omit this parameter or use `*` or `_all`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-metric] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-metric] [[index-stats-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=completion-fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=completion-fields] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=fielddata-fields] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=fielddata-fields] `forbid_closed_indices`:: (Optional, Boolean) If `true`, statistics are *not* collected from closed indices. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=groups] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=groups] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=level] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=level] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-segment-file-sizes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-segment-file-sizes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=include-unloaded-segments] [[index-stats-api-example]] diff --git a/docs/reference/indices/update-settings.asciidoc b/docs/reference/indices/update-settings.asciidoc index 1ac9ecbb6a6a3..3b29946d5ed7d 100644 --- a/docs/reference/indices/update-settings.asciidoc +++ b/docs/reference/indices/update-settings.asciidoc @@ -44,17 +44,17 @@ and indices, omit this parameter or use `*` or `_all`. [[update-index-settings-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=flat-settings] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `preserve_existing`:: (Optional, Boolean) If `true`, existing index settings remain unchanged. @@ -70,7 +70,7 @@ NOTE: Changing index settings on an automatically closed index using the `reopen parameter will result in the index becoming unavailable momentarily while the index is in the process of reopening. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[update-index-settings-api-request-body]] diff --git a/docs/reference/ingest/apis/delete-pipeline.asciidoc b/docs/reference/ingest/apis/delete-pipeline.asciidoc index 368f9f2b02626..6f50251dbf1cd 100644 --- a/docs/reference/ingest/apis/delete-pipeline.asciidoc +++ b/docs/reference/ingest/apis/delete-pipeline.asciidoc @@ -59,7 +59,7 @@ use a value of `*`. [[delete-pipeline-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[delete-pipeline-api-api-example]] diff --git a/docs/reference/ingest/apis/get-pipeline.asciidoc b/docs/reference/ingest/apis/get-pipeline.asciidoc index 9208f05078769..71a261d97bdeb 100644 --- a/docs/reference/ingest/apis/get-pipeline.asciidoc +++ b/docs/reference/ingest/apis/get-pipeline.asciidoc @@ -62,7 +62,7 @@ To get all ingest pipelines, omit this parameter or use `*`. [[get-pipeline-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-pipeline-api-api-example]] diff --git a/docs/reference/ingest/apis/put-pipeline.asciidoc b/docs/reference/ingest/apis/put-pipeline.asciidoc index ab1139b999952..5b532dedf8e82 100644 --- a/docs/reference/ingest/apis/put-pipeline.asciidoc +++ b/docs/reference/ingest/apis/put-pipeline.asciidoc @@ -59,7 +59,7 @@ See also <>. version. If specified and the update is successful, the pipeline's version is incremented. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[put-pipeline-api-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc index 4688b21d37e7d..6642cdc2a74ce 100644 --- a/docs/reference/ingest/enrich.asciidoc +++ b/docs/reference/ingest/enrich.asciidoc @@ -112,7 +112,7 @@ that doesn't change frequently. [[enrich-prereqs]] ==== Prerequisites -include::{es-repo-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] +include::{es-ref-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] [[create-enrich-source-index]] ==== Add enrich data @@ -203,7 +203,7 @@ documents first and verifying enrich data was added correctly using the [[update-enrich-data]] ==== Update an enrich index -include::{es-repo-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] +include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] If wanted, you can <> or <> any already ingested documents diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index 0995e3f643813..88d97d9422d5e 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -113,23 +113,23 @@ Classification configuration for inference. 
`num_top_classes`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `num_top_feature_importance_values`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `top_classes_results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] `prediction_field_type`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] [discrete] [[inference-processor-fill-mask-opt]] @@ -137,22 +137,22 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification `num_top_classes`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -160,12 +160,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -173,12 +173,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet 
[%collapsible%open] @@ -186,7 +186,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== @@ -196,18 +196,18 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -215,12 +215,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -228,12 +228,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] @@ -241,7 +241,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== @@ -253,11 +253,11 @@ Regression configuration for inference. 
`results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `num_top_feature_importance_values`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] [discrete] [[inference-processor-text-classification-opt]] @@ -268,22 +268,22 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num `num_top_classes`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -291,16 +291,16 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -308,16 +308,16 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] @@ -325,7 +325,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== @@ -335,18 +335,18 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -354,12 +354,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -367,12 +367,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] @@ -380,7 +380,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== @@ -391,19 +391,19 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -411,16 +411,16 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -428,16 +428,16 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] @@ -445,7 +445,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== @@ -456,26 +456,26 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `labels`:: (Optional, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] `multi_label`:: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] `results_field`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field-processor] `tokenization`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ===== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] @@ -483,12 +483,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] @@ -496,12 +496,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] @@ -509,7 +509,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizati `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ===== diff --git a/docs/reference/mapping/explicit-mapping.asciidoc b/docs/reference/mapping/explicit-mapping.asciidoc index ba2d9ec166766..3b0de5fad0512 100644 --- a/docs/reference/mapping/explicit-mapping.asciidoc +++ b/docs/reference/mapping/explicit-mapping.asciidoc @@ -63,9 +63,9 @@ PUT /my-index-000001/_mapping [[update-mapping]] === Update the mapping of a field -include::{es-repo-dir}/indices/put-mapping.asciidoc[tag=change-field-mapping] +include::{es-ref-dir}/indices/put-mapping.asciidoc[tag=change-field-mapping] -include::{es-repo-dir}/indices/put-mapping.asciidoc[tag=rename-field] +include::{es-ref-dir}/indices/put-mapping.asciidoc[tag=rename-field] [discrete] [[view-mapping]] diff --git a/docs/reference/mapping/mapping-settings-limit.asciidoc b/docs/reference/mapping/mapping-settings-limit.asciidoc index 6e05e6ea60855..1976422164995 100644 --- a/docs/reference/mapping/mapping-settings-limit.asciidoc +++ b/docs/reference/mapping/mapping-settings-limit.asciidoc @@ -58,4 +58,4 @@ or setting the index setting `index.mapping.total_fields.ignore_dynamic_beyond_l unless a user starts to add a huge number of fields with really long names. Default is `Long.MAX_VALUE` (no limit). 
-include::{es-repo-dir}/data-streams/tsds-index-settings.asciidoc[tag=dimensions-limit] +include::{es-ref-dir}/data-streams/tsds-index-settings.asciidoc[tag=dimensions-limit] diff --git a/docs/reference/mapping/types/aggregate-metric-double.asciidoc b/docs/reference/mapping/types/aggregate-metric-double.asciidoc index cf9377dec005e..e702d34f07d4c 100644 --- a/docs/reference/mapping/types/aggregate-metric-double.asciidoc +++ b/docs/reference/mapping/types/aggregate-metric-double.asciidoc @@ -65,9 +65,9 @@ include::numeric.asciidoc[tag=time_series_metric] [%collapsible%open] ==== -include::{es-repo-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-gauge] +include::{es-ref-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-gauge] -include::{es-repo-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-null] +include::{es-ref-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-null] ==== [[aggregate-metric-double-uses]] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 14fe9d4963970..6294423985ec6 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -51,7 +51,7 @@ It is not possible to store multiple values in one `dense_vector` field. [[index-vectors-knn-search]] ==== Index vectors for kNN search -include::{es-repo-dir}/search/search-your-data/knn-search.asciidoc[tag=knn-def] +include::{es-ref-dir}/search/search-your-data/knn-search.asciidoc[tag=knn-def] Dense vector fields can be used to rank documents in <>. This lets you perform diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 8611205cb749d..5d6ede6acd5ac 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -220,11 +220,11 @@ then 101 Lucene documents would be created: one for the parent document, and one nested object. Because of the expense associated with `nested` mappings, Elasticsearch puts settings in place to guard against performance problems: -include::{es-repo-dir}/mapping/mapping-settings-limit.asciidoc[tag=nested-fields-limit] +include::{es-ref-dir}/mapping/mapping-settings-limit.asciidoc[tag=nested-fields-limit] In the previous example, the `user` mapping would count as only 1 towards this limit. -include::{es-repo-dir}/mapping/mapping-settings-limit.asciidoc[tag=nested-objects-limit] +include::{es-ref-dir}/mapping/mapping-settings-limit.asciidoc[tag=nested-objects-limit] To illustrate how this setting works, consider adding another `nested` type called `comments` to the previous example mapping. For each document, the combined number of `user` and `comment` diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index a78611c4d8d38..32f4964e8ca43 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -200,11 +200,11 @@ metric type. You can't update this parameter for existing fields. 
.Valid `time_series_metric` values for numeric fields
[%collapsible%open]
====
-include::{es-repo-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-counter]
+include::{es-ref-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-counter]

-include::{es-repo-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-gauge]
+include::{es-ref-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-gauge]

-include::{es-repo-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-null]
+include::{es-ref-dir}/data-streams/tsds.asciidoc[tag=time-series-metric-null]
====
+
For a numeric time series metric, the `doc_values` parameter must be `true`. A
diff --git a/docs/reference/migration/apis/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc
index fd82bb3e0e6d2..67b4c113af2bc 100644
--- a/docs/reference/migration/apis/deprecation.asciidoc
+++ b/docs/reference/migration/apis/deprecation.asciidoc
@@ -5,7 +5,7 @@ Deprecation info
 ++++

-include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[]
+include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[]

The deprecation API is to be used to retrieve information about different
cluster, node, and index level settings that use deprecated features that will
diff --git a/docs/reference/migration/apis/feature-migration.asciidoc b/docs/reference/migration/apis/feature-migration.asciidoc
index 9a6306dc2f596..e38639ac44531 100644
--- a/docs/reference/migration/apis/feature-migration.asciidoc
+++ b/docs/reference/migration/apis/feature-migration.asciidoc
@@ -5,7 +5,7 @@ Feature migration
 ++++

-include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[]
+include::{es-ref-dir}/migration/apis/shared-migration-apis-tip.asciidoc[]

Version upgrades sometimes require changes to how features store configuration
information and data in system indices. The feature migration APIs enable you to
diff --git a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc
index 87e7041109710..99c09b9b05385 100644
--- a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc
+++ b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc
@@ -190,7 +190,7 @@ version of a major version to a remote cluster running any minor version in the
following major version. For example, a local 7.17 cluster can search any
remote 8.x cluster.

-include::{es-repo-dir}/search/search-your-data/ccs-version-compat-matrix.asciidoc[]
+include::{es-ref-dir}/search/search-your-data/ccs-version-compat-matrix.asciidoc[]

IMPORTANT: For the {ref}/eql-search-api.html[EQL search API], the local and remote
clusters must use the same {es} version if they have versions prior to 7.17.7 (included) or prior to 8.5.1 (included).
diff --git a/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc
index 988dff2d92328..f0cb968e082c5 100644
--- a/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/close-job.asciidoc
@@ -59,7 +59,7 @@ results the job might have recently produced or might produce in the future.

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard]
+
You can close all jobs by using `_all` or by specifying `*` as the job
identifier.
@@ -69,7 +69,7 @@
`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs]

`force`::
(Optional, Boolean) Use to close a failed job, or to forcefully close a job
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc
index 4e2501aa5a176..b80a248038aea 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-calendar-event.asciidoc
@@ -30,7 +30,7 @@ events and delete the calendar, see the

`<calendar_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]

`<event_id>`::
(Required, string) Identifier for the scheduled event. You can obtain this
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-calendar-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-calendar-job.asciidoc
index 8b01fa1066f46..6720e236fd635 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-calendar-job.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-calendar-job.asciidoc
@@ -23,11 +23,11 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the

`<calendar_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-list]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-list]

[[ml-delete-calendar-job-example]]
== {api-examples-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-calendar.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-calendar.asciidoc
index 5f710ca50c555..6684366c6f336 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-calendar.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-calendar.asciidoc
@@ -29,7 +29,7 @@ calendar.

`<calendar_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]

[[ml-delete-calendar-example]]
== {api-examples-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-datafeed.asciidoc
index bf4b69b5d8bcb..64a1e4c336fe6 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-datafeed.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-datafeed.asciidoc
@@ -27,7 +27,7 @@ can delete it.

`<feed_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id]

[[ml-delete-datafeed-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-filter.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-filter.asciidoc
index e59e04fe60c1a..4b41347543e8e 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-filter.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-filter.asciidoc
@@ -31,7 +31,7 @@ filter.
For more information, see

`<filter_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter-id]

[[ml-delete-filter-example]]
== {api-examples-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-forecast.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-forecast.asciidoc
index b2866ee834a24..74e6ce27084ad 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-forecast.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-forecast.asciidoc
@@ -45,7 +45,7 @@ forecasts from the job.

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]


[[ml-delete-forecast-query-parms]]
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc
index 7520a45f9211a..1bbe07fd44f49 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-job.asciidoc
@@ -43,7 +43,7 @@ parameters as the delete job request.

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

[[ml-delete-job-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/delete-snapshot.asciidoc b/docs/reference/ml/anomaly-detection/apis/delete-snapshot.asciidoc
index 6f39cdbb372e3..ad10de7a2ba0f 100644
--- a/docs/reference/ml/anomaly-detection/apis/delete-snapshot.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/delete-snapshot.asciidoc
@@ -30,11 +30,11 @@ the `model_snapshot_id` in the results from the get jobs API.

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`<snapshot_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id]

[[ml-delete-snapshot-example]]
== {api-examples-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/flush-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/flush-job.asciidoc
index ea449c82efece..68ff601749b4b 100644
--- a/docs/reference/ml/anomaly-detection/apis/flush-job.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/flush-job.asciidoc
@@ -36,7 +36,7 @@ opened again before analyzing further data.
`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

[[ml-flush-job-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/forecast.asciidoc b/docs/reference/ml/anomaly-detection/apis/forecast.asciidoc
index 80ee7c96b22a9..3e6067ab05857 100644
--- a/docs/reference/ml/anomaly-detection/apis/forecast.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/forecast.asciidoc
@@ -45,7 +45,7 @@ error occurs if you try to create a forecast for a job that has an

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

[[ml-forecast-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/get-bucket.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-bucket.asciidoc
index a234901be1c47..bca839d1db318 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-bucket.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-bucket.asciidoc
@@ -32,7 +32,7 @@ bucket.

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`<timestamp>`::
(Optional, string) The timestamp of a single bucket result. If you do not
@@ -57,7 +57,7 @@ specific timestamps.

`exclude_interim`::
(Optional, Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results]

`expand`::
(Optional, Boolean) If true, the output includes anomaly records. Defaults to `false`.
@@ -137,11 +137,11 @@ initial value that was calculated at the time the bucket was processed.

`is_interim`:::
(Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=is-interim]

`job_id`:::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`probability`:::
(number) The probability that the bucket has this behavior, in the range 0 to 1.
@@ -161,7 +161,7 @@ this.

`bucket_span`::
(number)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results]

`event_count`::
(number) The number of input data records processed in this bucket.
@@ -172,11 +172,11 @@ the initial value that was calculated at the time the bucket was processed.
`is_interim`::
(Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=is-interim]

`job_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`processing_time_ms`::
(number) The amount of time, in milliseconds, that it took to analyze the
diff --git a/docs/reference/ml/anomaly-detection/apis/get-calendar-event.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-calendar-event.asciidoc
index 803871ddea343..fc06e286bf46c 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-calendar-event.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-calendar-event.asciidoc
@@ -31,7 +31,7 @@ For more information, see

`<calendar_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+
You can get scheduled event information for multiple calendars in a single
API request by using a comma-separated list of ids or a wildcard expression.
@@ -89,7 +89,7 @@ following properties:

`calendar_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]

`description`::
(string) A description of the scheduled event.
diff --git a/docs/reference/ml/anomaly-detection/apis/get-calendar.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-calendar.asciidoc
index ffa19b3b1093a..b2c46bbe16c0e 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-calendar.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-calendar.asciidoc
@@ -31,7 +31,7 @@ For more information, see

`<calendar_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+
You can get information for multiple calendars in a single API request by using
a comma-separated list of ids or a wildcard expression. You can get information
@@ -76,7 +76,7 @@ properties:

`calendar_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id]

`job_ids`::
(array) An array of {anomaly-job} identifiers. For example:
diff --git a/docs/reference/ml/anomaly-detection/apis/get-category.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-category.asciidoc
index 034a598b73370..33de5e0f71a08 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-category.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-category.asciidoc
@@ -39,7 +39,7 @@ examine the description and examples of that category. For more information, see

`<job_id>`::
(Required, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`<category_id>`::
(Optional, long) Identifier for the category, which is unique in the job. If you
@@ -104,7 +104,7 @@ manual tweaking.

`job_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`max_matching_length`::
(unsigned integer) The maximum length of the fields that matched the category.
diff --git a/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc
index bb4ead5a6fd4c..a224f3880a1ad 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-datafeed-stats.asciidoc
@@ -39,7 +39,7 @@ IMPORTANT: This API returns a maximum of 10,000 {dfeeds}.

`<feed_id>`::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id-wildcard]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id-wildcard]
+
You can get statistics for multiple {dfeeds} in a single API request by using a
comma-separated list of {dfeeds} or a wildcard expression. You can get
@@ -51,7 +51,7 @@ identifier, or by omitting the identifier.

`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds]

[role="child_attributes"]
[[ml-get-datafeed-stats-results]]
@@ -62,30 +62,30 @@ informational; you cannot update their values.

`assignment_explanation`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds]

`datafeed_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id]

`node`::
(object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-datafeeds]
+
--
[%collapsible%open]
====
`attributes`:::
(object)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes]

`ephemeral_id`:::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id]

`id`:::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-id]

`name`:::
(string)
The node name. For example, `0-o0tOo`.

`transport_address`:::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address]
====
--

@@ -130,7 +130,7 @@ The start time as an epoch in milliseconds.

`state`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=state-datafeed]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=state-datafeed]

`timing_stats`::
(object) An object that provides statistical information about timing aspect of
this {dfeed}.
====
`average_search_time_per_bucket_ms`:::
(double)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-bucket-avg]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-bucket-avg]

`bucket_count`:::
(long)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count]

`exponential_average_search_time_per_hour_ms`:::
(double)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-exp-avg-hour]

`job_id`:::
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection]

`search_count`:::
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-count]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-count]

`total_search_time_ms`:::
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=search-time]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=search-time]
====
--
diff --git a/docs/reference/ml/anomaly-detection/apis/get-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-datafeed.asciidoc
index f3dd3b5f4da7d..a986e2220f928 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-datafeed.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-datafeed.asciidoc
@@ -36,7 +36,7 @@ IMPORTANT: This API returns a maximum of 10,000 {dfeeds}.

`<feed_id>`::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id-wildcard]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id-wildcard]
+
You can get information for multiple {dfeeds} in a single API request by using
a comma-separated list of {dfeeds} or a wildcard expression. You can get
@@ -48,11 +48,11 @@ information for all {dfeeds} by using `_all`, by specifying `*` as the

`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds]

`exclude_generated`::
(Optional, Boolean)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-generated]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-generated]

[[ml-get-datafeed-results]]
== {api-response-body-title}
diff --git a/docs/reference/ml/anomaly-detection/apis/get-filter.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-filter.asciidoc
index 29e11172b9e0f..f73dcd236f1af 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-filter.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-filter.asciidoc
@@ -31,7 +31,7 @@ You can get a single filter or all filters. For more information, see

`<filter_id>`::
(Optional, string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter-id]

[[ml-get-filter-query-parms]]
== {api-query-parms-title}
@@ -54,7 +54,7 @@ properties:

`filter_id`::
(string)
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter-id]
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter-id]

`items`::
(array of strings) An array of strings which is the filter item list.
diff --git a/docs/reference/ml/anomaly-detection/apis/get-influencer.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-influencer.asciidoc
index 76ead2921df6f..31489e361a848 100644
--- a/docs/reference/ml/anomaly-detection/apis/get-influencer.asciidoc
+++ b/docs/reference/ml/anomaly-detection/apis/get-influencer.asciidoc
@@ -30,7 +30,7 @@ the anomalies.
Influencer results are available only if an ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[ml-get-influencer-query-parms]] @@ -38,7 +38,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `desc`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=desc-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=desc-results] `end`:: (Optional, string) Returns influencers with timestamps earlier than this time. @@ -47,7 +47,7 @@ specific timestamps. `exclude_interim`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results] `from`:: (Optional, integer) @@ -99,7 +99,7 @@ properties: `bucket_span`:: (number) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results] `influencer_score`:: (number) A normalized score between 0-100, which is based on the probability of @@ -121,11 +121,11 @@ calculated at the time the bucket was processed. `is_interim`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=is-interim] `job_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `probability`:: (number) The probability that the influencer has this behavior, in the range 0 @@ -138,7 +138,7 @@ human-readable and friendly interpretation of this. `timestamp`:: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] NOTE: Additional influencer properties are added, depending on the fields being analyzed. For example, if it's analyzing `user_name` as an influencer, then a diff --git a/docs/reference/ml/anomaly-detection/apis/get-job-model-snapshot-upgrade-stats.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-job-model-snapshot-upgrade-stats.asciidoc index e1354648e385e..0939282a75916 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-job-model-snapshot-upgrade-stats.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-job-model-snapshot-upgrade-stats.asciidoc @@ -36,7 +36,7 @@ returned. ``:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard] ``:: (string) @@ -51,7 +51,7 @@ use wildcard expressions or `_all`. `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] [role="child_attributes"] [[ml-get-job-model-snapshot-upgrade-stats-results]] @@ -62,11 +62,11 @@ All of these properties are informational; you cannot update their values. `assignment_explanation`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-datafeeds] `job_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `node`:: (object) @@ -78,15 +78,15 @@ available only for upgrade tasks that are assigned to a node. 
==== `attributes`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes] `ephemeral_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] `id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-id] `name`::: (string) @@ -94,13 +94,13 @@ The node name. For example, `0-o0tOo`. `transport_address`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] ==== -- `snapshot_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-id] `state`:: (string) diff --git a/docs/reference/ml/anomaly-detection/apis/get-job-stats.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-job-stats.asciidoc index 4c1fbfe2da3de..a5bd188397bb9 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-job-stats.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-job-stats.asciidoc @@ -46,7 +46,7 @@ omitting the identifier. `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] [role="child_attributes"] [[ml-get-job-stats-results]] @@ -57,7 +57,7 @@ job: `assignment_explanation`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-explanation-anomaly-jobs] //Begin data_counts [[datacounts]]`data_counts`:: @@ -71,75 +71,75 @@ counts are not reset. 
==== `bucket_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count-anomaly-jobs] `earliest_record_timestamp`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=earliest-record-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=earliest-record-timestamp] `empty_bucket_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=empty-bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=empty-bucket-count] `input_bytes`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-bytes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-bytes] `input_field_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-field-count] `input_record_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=input-record-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=input-record-count] `invalid_date_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=invalid-date-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=invalid-date-count] `job_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `last_data_time`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=last-data-time] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=last-data-time] `latest_empty_bucket_timestamp`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-empty-bucket-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-empty-bucket-timestamp] `latest_record_timestamp`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-record-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-record-timestamp] `latest_sparse_bucket_timestamp`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latest-sparse-record-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latest-sparse-record-timestamp] `log_time`::: (date) The timestamp of the `data_counts` according to server time. `missing_field_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=missing-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=missing-field-count] + The value of `processed_record_count` includes this count. `out_of_order_timestamp_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=out-of-order-timestamp-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=out-of-order-timestamp-count] `processed_field_count`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=processed-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=processed-field-count] `processed_record_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=processed-record-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=processed-record-count] `sparse_bucket_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=sparse-bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=sparse-bucket-count] ==== //End data_counts @@ -183,13 +183,13 @@ forecasts related to this job. 
If there are no forecasts, this property is omitt `total`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=forecast-total] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=forecast-total] ==== //End forecasts_stats `job_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] //Begin model_size_stats [[modelsizestats]]`model_size_stats`:: @@ -201,85 +201,85 @@ model. ==== `assignment_memory_basis`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-memory-basis] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-memory-basis] `bucket_allocation_failures_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-allocation-failures-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-allocation-failures-count] `categorized_doc_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorized-doc-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorized-doc-count] `categorization_status`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-status] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-status] `dead_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dead-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dead-category-count] `failed_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] `frequent_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequent-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequent-category-count] `job_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `log_time`::: (date) The timestamp of the `model_size_stats` according to server time. `memory_status`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-status] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-status] `model_bytes`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-bytes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-bytes] `model_bytes_exceeded`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-bytes-exceeded] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-bytes-exceeded] `model_bytes_memory_limit`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-anomaly-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-anomaly-jobs] `peak_model_bytes`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=peak-model-bytes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=peak-model-bytes] `rare_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=rare-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=rare-category-count] `result_type`::: (string) For internal use. The type of result. 
`total_by_field_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-by-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-by-field-count] `total_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-category-count] `total_over_field_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-over-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-over-field-count] `total_partition_field_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=total-partition-field-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=total-partition-field-count] `timestamp`::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-timestamp] ==== //End model_size_stats @@ -293,32 +293,32 @@ available only for open jobs. ==== `attributes`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes] `ephemeral_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] `id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-id] `name`::: (string) The node name. `transport_address`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] ==== //End node `open_time`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=open-time] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=open-time] `state`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=state-anomaly-job] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=state-anomaly-job] //Begin timing_stats [[timingstats]]`timing_stats`:: @@ -333,31 +333,31 @@ this job. 
`bucket_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-count] `exponential_average_bucket_processing_time_ms`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average] `exponential_average_bucket_processing_time_per_hour_ms`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average-hour] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-exponential-average-hour] `job_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `maximum_bucket_processing_time_ms`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-maximum] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-maximum] `minimum_bucket_processing_time_ms`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-minimum] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-minimum] `total_bucket_processing_time_ms`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-time-total] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-time-total] ==== //End timing_stats diff --git a/docs/reference/ml/anomaly-detection/apis/get-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-job.asciidoc index ac8746d761e06..4ee6c429ce730 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-job.asciidoc @@ -46,11 +46,11 @@ omitting the identifier. `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] `exclude_generated`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] [role="child_attributes"] [[ml-get-job-results]] @@ -121,83 +121,83 @@ the account name is listed in the response. 
`datafeed_id`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] `aggregations`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=aggregations] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=aggregations] `chunking_config`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=chunking-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=chunking-config] + .Properties of `chunking_config` [%collapsible%open] ===== `mode`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=mode] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=mode] `time_span`::: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=time-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=time-span] ===== `delayed_data_check_config`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] + .Properties of `delayed_data_check_config` [%collapsible%open] ===== `check_window`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] `enabled`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] ===== `frequency`::: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequency] `indices`::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices] `indices_options`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices-options] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices-options] `job_id`::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `max_empty_searches`::: (Optional,integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] `query`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query] `query_delay`::: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query-delay] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query-delay] `runtime_mappings`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] `script_fields`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=script-fields] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=script-fields] `scroll_size`::: (Optional, unsigned integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=scroll-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=scroll-size] ==== `finished_time`:: @@ -218,7 +218,7 @@ independently. The `job_version` value represents the new version number. 
`model_snapshot_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-id] [[ml-get-job-response-codes]] == {api-response-codes-title} diff --git a/docs/reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc index cf8546d78fe62..b581b5c3a2eb2 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-overall-buckets.asciidoc @@ -49,7 +49,7 @@ a span equal to the jobs' largest bucket span. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard-list] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-wildcard-list] + You can summarize the bucket results for all {anomaly-jobs} by using `_all` or by specifying `*` as the job identifier. @@ -59,7 +59,7 @@ by specifying `*` as the job identifier. `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-jobs] `bucket_span`:: (Optional, string) The span of the overall buckets. Must be greater or equal to @@ -108,7 +108,7 @@ of the job with the longest one. `is_interim`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=is-interim] `jobs`:: (array) An array of objects that contain the `max_anomaly_score` per `job_id`. @@ -121,7 +121,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim] `timestamp`:: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] [[ml-get-overall-buckets-example]] == {api-examples-title} diff --git a/docs/reference/ml/anomaly-detection/apis/get-record.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-record.asciidoc index 686632366315e..e74ab3ecb4b12 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-record.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-record.asciidoc @@ -39,14 +39,14 @@ of detectors. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[ml-get-record-query-parms]] == {api-query-parms-title} `desc`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=desc-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=desc-results] `end`:: (Optional, string) Returns records with timestamps earlier than this time. @@ -55,7 +55,7 @@ specific timestamps. `exclude_interim`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-interim-results] `from`:: (Optional, integer) Skips the specified number of records. Defaults to `0`. @@ -158,11 +158,11 @@ current bucket. `bucket_span`:: (number) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-span-results] `by_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=by-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=by-field-name] `by_field_value`:: (string) The value of `by_field_name`. @@ -220,11 +220,11 @@ at the time the bucket was processed. 
`is_interim`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=is-interim] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=is-interim] `job_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `multi_bucket_impact`:: (number) An indication of how strongly an anomaly is multi bucket or single @@ -234,14 +234,14 @@ bucket. `over_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=over-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=over-field-name] `over_field_value`:: (string) The value of `over_field_name`. `partition_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] `partition_field_value`:: (string) The value of `partition_field_name`. @@ -262,7 +262,7 @@ be updated by a re-normalization process as new data is analyzed. `timestamp`:: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=timestamp-results] `typical`:: (array) The typical value for the bucket, according to analytical modeling. diff --git a/docs/reference/ml/anomaly-detection/apis/get-snapshot.asciidoc b/docs/reference/ml/anomaly-detection/apis/get-snapshot.asciidoc index 0132f57d412ef..d94bd4060854a 100644 --- a/docs/reference/ml/anomaly-detection/apis/get-snapshot.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/get-snapshot.asciidoc @@ -26,11 +26,11 @@ Requires the `monitor_ml` cluster privilege. This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + -- You can get information for multiple snapshots by using a comma-separated list @@ -122,7 +122,7 @@ independently. The `min_version` value represents the new version number. ==== `assignment_memory_basis`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=assignment-memory-basis] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=assignment-memory-basis] `bucket_allocation_failures_count`::: (long) The number of buckets for which entities were not processed due to memory @@ -155,7 +155,7 @@ side effect of the way categorization has no prior training.) `failed_category_count`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=failed-category-count] `frequent_category_count`::: (long) The number of categories that match more than 1% of categorized @@ -163,7 +163,7 @@ documents. `job_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `log_time`::: (date) The timestamp that the `model_size_stats` were recorded, according to @@ -223,7 +223,7 @@ separately for each detector and partition. 
`retain`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=retain] `snapshot_id`:: (string) A numerical character string that uniquely identifies the model diff --git a/docs/reference/ml/anomaly-detection/apis/open-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/open-job.asciidoc index 92d8908baab41..385f672f467f6 100644 --- a/docs/reference/ml/anomaly-detection/apis/open-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/open-job.asciidoc @@ -36,7 +36,7 @@ data is received. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[ml-open-job-query-parms]] == {api-query-parms-title} diff --git a/docs/reference/ml/anomaly-detection/apis/post-data.asciidoc b/docs/reference/ml/anomaly-detection/apis/post-data.asciidoc index e5b8a30055f9a..931efcf8c2a52 100644 --- a/docs/reference/ml/anomaly-detection/apis/post-data.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/post-data.asciidoc @@ -52,7 +52,7 @@ or a comma-separated list. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[ml-post-data-query-parms]] == {api-query-parms-title} diff --git a/docs/reference/ml/anomaly-detection/apis/preview-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/preview-datafeed.asciidoc index c838af78871d6..243cd2a5f32a3 100644 --- a/docs/reference/ml/anomaly-detection/apis/preview-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/preview-datafeed.asciidoc @@ -52,7 +52,7 @@ supply the credentials. ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] + NOTE: If you provide the `` as a path parameter, you cannot provide {dfeed} or {anomaly-job} configuration details in the request body. diff --git a/docs/reference/ml/anomaly-detection/apis/put-calendar-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-calendar-job.asciidoc index 2483f24f6afd4..0c19a08cbd74b 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-calendar-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-calendar-job.asciidoc @@ -23,11 +23,11 @@ Requires the `manage_ml` cluster privilege. 
This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id] ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-list] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-list] [[ml-put-calendar-job-example]] == {api-examples-title} diff --git a/docs/reference/ml/anomaly-detection/apis/put-calendar.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-calendar.asciidoc index 8ddaea995c710..fd2b58a31737a 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-calendar.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-calendar.asciidoc @@ -29,7 +29,7 @@ For more information, see ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=calendar-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=calendar-id] [[ml-put-calendar-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc index 478a70e23b93f..47e3059666d76 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-datafeed.asciidoc @@ -52,7 +52,7 @@ credentials are used instead. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] [[ml-put-datafeed-query-params]] == {api-query-parms-title} @@ -62,11 +62,11 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] concrete indices are ignored. This includes the `_all` string or when no indices are specified. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] `ignore_unavailable`:: (Optional, Boolean) If `true`, unavailable indices (missing or closed) are @@ -79,79 +79,79 @@ ignored. Defaults to `false`. 
`aggregations`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=aggregations] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=aggregations] `chunking_config`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=chunking-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=chunking-config] + .Properties of `chunking_config` [%collapsible%open] ==== `mode`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=mode] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=mode] `time_span`::: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=time-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=time-span] ==== `delayed_data_check_config`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] + .Properties of `delayed_data_check_config` [%collapsible%open] ==== `check_window`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] `enabled`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] ==== `frequency`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequency] `indices`:: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices] `indices_options`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices-options] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices-options] `job_id`:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] `max_empty_searches`:: (Optional,integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] `query`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query] `query_delay`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query-delay] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query-delay] `runtime_mappings`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] `script_fields`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=script-fields] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=script-fields] `scroll_size`:: (Optional, unsigned integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=scroll-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=scroll-size] [[ml-put-datafeed-example]] == {api-examples-title} diff --git a/docs/reference/ml/anomaly-detection/apis/put-filter.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-filter.asciidoc index 4e0d2ed1fabcf..b50ba8cb1e23b 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-filter.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-filter.asciidoc @@ -31,7 +31,7 @@ configuration objects. 
For more information, see ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter-id] [[ml-put-filter-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc index e4e10e2ae2fc5..012904a9affa7 100644 --- a/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/put-job.asciidoc @@ -42,7 +42,7 @@ credentials are used instead. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-define] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-define] [role="child_attributes"] [[ml-put-job-request-body]] @@ -50,31 +50,31 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection-define `allow_lazy_open`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-lazy-open] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-lazy-open] //Begin analysis_config [[put-analysisconfig]]`analysis_config`:: (Required, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=analysis-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=analysis-config] + .Properties of `analysis_config` [%collapsible%open] ==== `bucket_span`::: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=bucket-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=bucket-span] `categorization_analyzer`::: (object or string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-analyzer] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-analyzer] `categorization_field_name`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-field-name] `categorization_filters`::: (array of strings) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-filters] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-filters] //Begin analysis_config.detectors `detectors`::: @@ -90,12 +90,12 @@ no analysis can occur and an error is returned. 
===== `by_field_name`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=by-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=by-field-name] //Begin analysis_config.detectors.custom_rules [[put-customrules]]`custom_rules`:::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules] + .Properties of `custom_rules` [%collapsible%open] @@ -103,45 +103,45 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules] `actions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] //Begin analysis_config.detectors.custom_rules.conditions `conditions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] + .Properties of `conditions` [%collapsible%open] ======= `applies_to`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] `operator`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] `value`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] ======= //End analysis_config.detectors.custom_rules.conditions //Begin analysis_config.detectors.custom_rules.scope `scope`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] + .Properties of `scope` [%collapsible%open] ======= `filter_id`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] `filter_type`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] ======= //End analysis_config.detectors.custom_rules.scope ====== @@ -149,114 +149,114 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] `detector_description`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-description] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-description] `detector_index`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-index] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-index] + If you specify a value for this property, it is ignored. 
`exclude_frequent`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-frequent] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-frequent] `field_name`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-field-name] `function`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=function] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=function] `over_field_name`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=over-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=over-field-name] `partition_field_name`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] `use_null`:::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=use-null] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=use-null] ===== //End analysis_config.detectors `influencers`::: (array of strings) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=influencers] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=influencers] `latency`::: (time units) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=latency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=latency] `model_prune_window`::: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-prune-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-prune-window] `multivariate_by_fields`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=multivariate-by-fields] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=multivariate-by-fields] //Begin analysis_config.per_partition_categorization `per_partition_categorization`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization] + .Properties of `per_partition_categorization` [%collapsible%open] ===== `enabled`:::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-enabled] `stop_on_warn`:::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-stop-on-warn] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-stop-on-warn] ===== //End analysis_config.per_partition_categorization `summary_count_field_name`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=summary-count-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=summary-count-field-name] ==== //End analysis_config //Begin analysis_limits [[put-analysislimits]]`analysis_limits`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=analysis-limits] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=analysis-limits] + .Properties of `analysis_limits` [%collapsible%open] ==== `categorization_examples_limit`::: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=categorization-examples-limit] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=categorization-examples-limit] `model_memory_limit`::: (long or string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-ad] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-ad] ==== //End analysis_limits `background_persist_interval`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=background-persist-interval] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=background-persist-interval] [[put-customsettings]]`custom_settings`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-settings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-settings] [[put-dailymodelsnapshotretentionafterdays]]`daily_model_snapshot_retention_after_days`:: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=daily-model-snapshot-retention-after-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=daily-model-snapshot-retention-after-days] //Begin data_description [[put-datadescription]]`data_description`:: (Required, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=data-description] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=data-description] //End data_description [[put-datafeedconfig]]`datafeed_config`:: @@ -269,81 +269,81 @@ from {es} for analysis by the job. You can associate only one {dfeed} with each ==== `aggregations`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=aggregations] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=aggregations] `chunking_config`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=chunking-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=chunking-config] + .Properties of `chunking_config` [%collapsible%open] ===== `mode`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=mode] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=mode] `time_span`::: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=time-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=time-span] ===== `datafeed_id`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] + Defaults to the same ID as the {anomaly-job}. 
`delayed_data_check_config`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] + .Properties of `delayed_data_check_config` [%collapsible%open] ===== `check_window`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] `enabled`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] ===== `frequency`::: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequency] `indices`::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices] `indices_options`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices-options] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices-options] `max_empty_searches`::: (Optional,integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] `query`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query] `query_delay`::: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query-delay] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query-delay] `runtime_mappings`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] `script_fields`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=script-fields] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=script-fields] `scroll_size`::: (Optional, unsigned integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=scroll-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=scroll-size] ==== `description`:: @@ -351,45 +351,45 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=scroll-size] `groups`:: (Optional, array of strings) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=groups] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=groups] //Begin model_plot_config `model_plot_config`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config] + .Properties of `model_plot_config` [%collapsible%open] ==== `annotations_enabled`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-annotations-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-annotations-enabled] `enabled`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-enabled] `terms`::: experimental[] (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-terms] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-terms] ==== //End model_plot_config [[put-modelsnapshotretentiondays]]`model_snapshot_retention_days`:: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-retention-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-retention-days] 
[[put-renormalizationwindowdays]]`renormalization_window_days`:: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=renormalization-window-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=renormalization-window-days] [[put-resultsindexname]]`results_index_name`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=results-index-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=results-index-name] [[put-resultsretentiondays]]`results_retention_days`:: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=results-retention-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=results-retention-days] [[ml-put-job-example]] == {api-examples-title} diff --git a/docs/reference/ml/anomaly-detection/apis/reset-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/reset-job.asciidoc index 4076e4a8038ef..9009d634a2e9c 100644 --- a/docs/reference/ml/anomaly-detection/apis/reset-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/reset-job.asciidoc @@ -35,7 +35,7 @@ separated list. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [[ml-reset-job-query-parms]] == {api-query-parms-title} diff --git a/docs/reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc b/docs/reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc index 4b3a61133dfb3..c8d7a27ee2047 100644 --- a/docs/reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/revert-snapshot.asciidoc @@ -39,11 +39,11 @@ NOTE: Reverting to a snapshot does not change the `data_counts` values of the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] + -- You can specify `empty` as the . Reverting to the `empty` snapshot diff --git a/docs/reference/ml/anomaly-detection/apis/start-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/start-datafeed.asciidoc index 6d958383f337f..b54c80133d7dc 100644 --- a/docs/reference/ml/anomaly-detection/apis/start-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/start-datafeed.asciidoc @@ -43,7 +43,7 @@ you created or updated the {dfeed}, those credentials are used instead. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] [[ml-start-datafeed-query-parms]] == {api-query-parms-title} diff --git a/docs/reference/ml/anomaly-detection/apis/stop-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/stop-datafeed.asciidoc index eb50e0a154a58..bc15a1de8a056 100644 --- a/docs/reference/ml/anomaly-detection/apis/stop-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/stop-datafeed.asciidoc @@ -45,7 +45,7 @@ identifier. `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-datafeeds] `force`:: (Optional, Boolean) If true, the {dfeed} is stopped forcefully. 
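Before the next file, a short illustration of the stop API whose `force` parameter is described above. This is a minimal sketch only, not part of the patch, and the {dfeed} ID is hypothetical:

[source,console]
----
POST _ml/datafeeds/datafeed-total-requests/_stop
{
  "force": true,
  "timeout": "30s"
}
----

Both `force` and `timeout` can equally be passed as query parameters instead of in the request body.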
diff --git a/docs/reference/ml/anomaly-detection/apis/update-datafeed.asciidoc b/docs/reference/ml/anomaly-detection/apis/update-datafeed.asciidoc index 5e6121cd01ac9..b3920d9d4f809 100644 --- a/docs/reference/ml/anomaly-detection/apis/update-datafeed.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/update-datafeed.asciidoc @@ -40,7 +40,7 @@ credentials are used instead. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] [[ml-update-datafeed-query-params]] == {api-query-parms-title} @@ -50,11 +50,11 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=datafeed-id] concrete indices are ignored. This includes the `_all` string or when no indices are specified. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] `ignore_unavailable`:: (Optional, Boolean) If `true`, unavailable indices (missing or closed) are @@ -68,55 +68,55 @@ The following properties can be updated after the {dfeed} is created: `aggregations`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=aggregations] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=aggregations] `chunking_config`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=chunking-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=chunking-config] + .Properties of `chunking_config` [%collapsible%open] ==== `mode`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=mode] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=mode] `time_span`::: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=time-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=time-span] ==== `delayed_data_check_config`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config] + .Properties of `delayed_data_check_config` [%collapsible%open] ==== `check_window`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-check-window] `enabled`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=delayed-data-check-config-enabled] ==== `frequency`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=frequency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=frequency] `indices`:: (Optional, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices] `indices_options`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=indices-options] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=indices-options] `max_empty_searches`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-empty-searches] + -- The special value `-1` unsets this setting. @@ -124,7 +124,7 @@ The special value `-1` unsets this setting. 
`query`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query] + -- WARNING: If you change the query, the analyzed data is also changed. Therefore, @@ -138,19 +138,19 @@ the results of the other job. `query_delay`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=query-delay] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=query-delay] `runtime_mappings`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=runtime-mappings] `script_fields`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=script-fields] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=script-fields] `scroll_size`:: (Optional, unsigned integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=scroll-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=scroll-size] [[ml-update-datafeed-example]] diff --git a/docs/reference/ml/anomaly-detection/apis/update-filter.asciidoc b/docs/reference/ml/anomaly-detection/apis/update-filter.asciidoc index 46e853e082658..a4221c37a438e 100644 --- a/docs/reference/ml/anomaly-detection/apis/update-filter.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/update-filter.asciidoc @@ -23,7 +23,7 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter-id] [[ml-update-filter-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/anomaly-detection/apis/update-job.asciidoc b/docs/reference/ml/anomaly-detection/apis/update-job.asciidoc index fd645fe0d9bf6..6953235c854cb 100644 --- a/docs/reference/ml/anomaly-detection/apis/update-job.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/update-job.asciidoc @@ -23,7 +23,7 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] [role="child_attributes"] [[ml-update-job-request-body]] @@ -33,7 +33,7 @@ The following properties can be updated after the job is created: `allow_lazy_open`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-lazy-open] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-lazy-open] + -- NOTE: If the job is open when you make the update, you must stop the {dfeed}, @@ -43,7 +43,7 @@ close the job, then reopen the job and restart the {dfeed} for the changes to ta //Begin analysis_limits [[update-analysislimits]]`analysis_limits`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=analysis-limits] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=analysis-limits] + You can update the `analysis_limits` only while the job is closed. + @@ -52,7 +52,7 @@ You can update the `analysis_limits` only while the job is closed. ==== `model_memory_limit`::: (long or string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-ad] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-ad] + -- [NOTE] @@ -71,7 +71,7 @@ to re-run the job with an increased `model_memory_limit`. 
`background_persist_interval`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=background-persist-interval] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=background-persist-interval] + -- NOTE: If the job is open when you make the update, you must stop the {dfeed}, @@ -81,11 +81,11 @@ close the job, then reopen the job and restart the {dfeed} for the changes to ta [[update-customsettings]]`custom_settings`:: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-settings] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-settings] `daily_model_snapshot_retention_after_days`:: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=daily-model-snapshot-retention-after-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=daily-model-snapshot-retention-after-days] `description`:: (string) A description of the job. @@ -101,7 +101,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=daily-model-snapshot-retention- //Begin detectors.custom_rules `custom_rules`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules] + .Properties of `custom_rules` [%collapsible%open] @@ -109,12 +109,12 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules] `actions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] // Begin detectors.custom_rules.conditions `conditions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] + .Properties of `conditions` [%collapsible%open] @@ -122,33 +122,33 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] `applies_to`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] `operator`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] `value`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] ====== //End detectors.custom_rules.conditions //Begin detectors.custom_rules.scope `scope`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] + .Properties of `scope` [%collapsible%open] ====== `filter_id`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] `filter_type`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] ====== //End detectors.custom_rules.scope ===== @@ -156,11 +156,11 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] `description`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-description] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-description] `detector_index`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-index] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-index] + -- If you want to update a 
specific detector, you must use this identifier. You @@ -171,59 +171,59 @@ cannot, however, change the `detector_index` value for a detector. `groups`:: (array of strings) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=groups] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=groups] //Begin model_plot_config `model_plot_config`:: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config] + .Properties of `model_plot_config` [%collapsible%open] ==== `annotations_enabled`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-annotations-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-annotations-enabled] `enabled`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-enabled] `terms`::: experimental[] (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-terms] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-plot-config-terms] ==== //End model_plot_config `model_prune_window`:: (<>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-prune-window] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-prune-window] `model_snapshot_retention_days`:: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-retention-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-snapshot-retention-days] //Begin per_partition_categorization `per_partition_categorization`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization] + .Properties of `per_partition_categorization` [%collapsible%open] ==== `enabled`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-enabled] `stop_on_warn`::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-stop-on-warn] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=per-partition-categorization-stop-on-warn] ==== //End per_partition_categorization `renormalization_window_days`:: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=renormalization-window-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=renormalization-window-days] + -- NOTE: If the job is open when you make the update, you must stop the {dfeed}, @@ -233,7 +233,7 @@ close the job, then reopen the job and restart the {dfeed} for the changes to ta `results_retention_days`:: (long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=results-retention-days] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=results-retention-days] [[ml-update-job-example]] diff --git a/docs/reference/ml/anomaly-detection/apis/update-snapshot.asciidoc b/docs/reference/ml/anomaly-detection/apis/update-snapshot.asciidoc index 2978d64f01223..f8c0384860029 100644 --- a/docs/reference/ml/anomaly-detection/apis/update-snapshot.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/update-snapshot.asciidoc @@ -23,11 +23,11 @@ Requires the `manage_ml` cluster privilege. 
This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] [[ml-update-snapshot-request-body]] == {api-request-body-title} @@ -39,7 +39,7 @@ The following properties can be updated after the model snapshot is created: `retain`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=retain] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=retain] [[ml-update-snapshot-example]] diff --git a/docs/reference/ml/anomaly-detection/apis/upgrade-job-model-snapshot.asciidoc b/docs/reference/ml/anomaly-detection/apis/upgrade-job-model-snapshot.asciidoc index 3f09b292dc2d2..3a74e3b2296df 100644 --- a/docs/reference/ml/anomaly-detection/apis/upgrade-job-model-snapshot.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/upgrade-job-model-snapshot.asciidoc @@ -42,11 +42,11 @@ snapshot cannot be the current snapshot of the {anomaly-job}. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-anomaly-detection] ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=snapshot-id] [[ml-upgrade-job-model-snapshot-query-parms]] diff --git a/docs/reference/ml/anomaly-detection/apis/validate-detector.asciidoc b/docs/reference/ml/anomaly-detection/apis/validate-detector.asciidoc index 5c312264cf718..c71673be7dc00 100644 --- a/docs/reference/ml/anomaly-detection/apis/validate-detector.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/validate-detector.asciidoc @@ -29,78 +29,78 @@ before you create an {anomaly-job}. 
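As a minimal sketch of the validation call whose parameters are listed below (the endpoint is `POST _ml/anomaly_detectors/_validate/detector`; the function and field names are illustrative):

[source,console]
----
POST _ml/anomaly_detectors/_validate/detector
{
  "function": "mean",
  "field_name": "responsetime",
  "by_field_name": "airline"
}
----

The API acknowledges a valid configuration; an invalid one returns an error describing the problem.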
`by_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=by-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=by-field-name] `custom_rules`:: + -- (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules] `analysis_config`.`detectors`.`custom_rules`.`actions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-actions] `analysis_config`.`detectors`.`custom_rules`.`scope`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope] `analysis_config`.`detectors`.`custom_rules`.`scope`.`filter_id`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-id] `analysis_config`.`detectors`.`custom_rules`.`scope`.`filter_type`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-scope-filter-type] `analysis_config`.`detectors`.`custom_rules`.`conditions`::: (array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions] `analysis_config`.`detectors`.`custom_rules`.`conditions`.`applies_to`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-applies-to] `analysis_config`.`detectors`.`custom_rules`.`conditions`.`operator`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-operator] `analysis_config`.`detectors`.`custom_rules`.`conditions`.`value`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-rules-conditions-value] -- `detector_description`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-description] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-description] `detector_index`:: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-index] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-index] `exclude_frequent`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-frequent] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-frequent] `field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=detector-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=detector-field-name] `function`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=function] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=function] `over_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=over-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=over-field-name] `partition_field_name`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=partition-field-name] `use_null`:: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=use-null] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=use-null] [[ml-valid-detector-example]] == {api-examples-title} diff --git 
a/docs/reference/ml/common/apis/get-ml-memory.asciidoc b/docs/reference/ml/common/apis/get-ml-memory.asciidoc index 2dd79a969aca0..81e0f59a97e58 100644 --- a/docs/reference/ml/common/apis/get-ml-memory.asciidoc +++ b/docs/reference/ml/common/apis/get-ml-memory.asciidoc @@ -42,7 +42,7 @@ node, both within the JVM heap, and natively, outside of the JVM. Specify this query parameter to include the fields with units in the response. Otherwise only the `_in_bytes` sizes are returned in the response. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[get-ml-memory-response-body]] @@ -89,11 +89,11 @@ Contains statistics for the node. ===== `attributes`:: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes] `ephemeral_id`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] `jvm`:: (object) @@ -216,7 +216,7 @@ Roles assigned to the node. See <>. `transport_address`:: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] ===== ==== diff --git a/docs/reference/ml/df-analytics/apis/delete-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/delete-dfanalytics.asciidoc index 8631e1e7ae16f..b505da570244f 100644 --- a/docs/reference/ml/df-analytics/apis/delete-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/delete-dfanalytics.asciidoc @@ -27,7 +27,7 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] [[ml-delete-dfanalytics-query-params]] == {api-query-parms-title} diff --git a/docs/reference/ml/df-analytics/apis/explain-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/explain-dfanalytics.asciidoc index ed0a98474b5c4..0ee7ec5634582 100644 --- a/docs/reference/ml/df-analytics/apis/explain-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/explain-dfanalytics.asciidoc @@ -52,7 +52,7 @@ they are not included in the explanation. ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] [[ml-explain-dfanalytics-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/df-analytics/apis/get-dfanalytics-stats.asciidoc b/docs/reference/ml/df-analytics/apis/get-dfanalytics-stats.asciidoc index b18899cc86558..b193379084db1 100644 --- a/docs/reference/ml/df-analytics/apis/get-dfanalytics-stats.asciidoc +++ b/docs/reference/ml/df-analytics/apis/get-dfanalytics-stats.asciidoc @@ -35,7 +35,7 @@ Requires the `monitor_ml` cluster privilege. 
This privilege is included in the ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] [[ml-get-dfanalytics-stats-query-params]] @@ -43,19 +43,19 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-def `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] `from`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=from] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=from] `size`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=size] `verbose`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=verbose] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=verbose] [role="child_attributes"] [[ml-get-dfanalytics-stats-response-body]] @@ -95,106 +95,106 @@ An object containing the parameters of the {classanalysis} job. ======= `alpha`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] `class_assignment_objective`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=class-assignment-objective] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=class-assignment-objective] `downsample_factor`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] `eta`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=eta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=eta] `eta_growth_rate_per_tree`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] `feature_bag_fraction`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] `gamma`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=gamma] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=gamma] `lambda`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=lambda] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=lambda] `max_attempts_to_add_tree`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-attempts] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-attempts] `max_optimization_rounds_per_hyperparameter`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] `max_trees`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-trees] `num_folds`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-num-folds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-num-folds] `num_splits_per_feature`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-num-splits] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-num-splits] `soft_tree_depth_limit`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] `soft_tree_depth_tolerance`:::: (double) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] ======= //End class_hyperparameters `iteration`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-iteration] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-iteration] `timestamp`:::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] //Begin class_timing_stats `timing_stats`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] + .Properties of `timing_stats` [%collapsible%open] ======= `elapsed_time`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] `iteration_time`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-iteration] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-iteration] ======= //End class_timing_stats //Begin class_validation_loss `validation_loss`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss] + .Properties of `validation_loss` [%collapsible%open] ======= `fold_values`:::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-fold] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-fold] `loss_type`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-type] ======= //End class_validation_loss ====== @@ -219,45 +219,45 @@ heuristics. 
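The statistics described in this section are returned by the stats endpoint. As a minimal sketch, assuming a hypothetical job named `my-analysis`:

[source,console]
----
GET _ml/data_frame/analytics/my-analysis/_stats?verbose=true
----

Setting `verbose=true` (documented above) requests the fuller response, which is where the `analysis_stats` object listed here appears.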
======= `compute_feature_influence`:::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=compute-feature-influence] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=compute-feature-influence] `feature_influence_threshold`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-influence-threshold] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-influence-threshold] `method`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=method] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=method] `n_neighbors`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=n-neighbors] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=n-neighbors] `outlier_fraction`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=outlier-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=outlier-fraction] `standardization_enabled`:::: (Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=standardization-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=standardization-enabled] ======= //End parameters `timestamp`:::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] //Begin od_timing_stats `timing_stats`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] + .Property of `timing_stats` [%collapsible%open] ======= `elapsed_time`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] ======= //End od_timing_stats ====== @@ -281,103 +281,103 @@ An object containing the parameters of the {reganalysis} job. 
======= `alpha`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] `downsample_factor`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] `eta`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=eta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=eta] `eta_growth_rate_per_tree`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] `feature_bag_fraction`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] `gamma`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=gamma] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=gamma] `lambda`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=lambda] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=lambda] `max_attempts_to_add_tree`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-attempts] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-attempts] `max_optimization_rounds_per_hyperparameter`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] `max_trees`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-trees] `num_folds`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-num-folds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-num-folds] `num_splits_per_feature`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-num-splits] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-num-splits] `soft_tree_depth_limit`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] `soft_tree_depth_tolerance`:::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] ======= //End reg_hyperparameters `iteration`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-iteration] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-iteration] `timestamp`:::: (date) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timestamp] //Begin reg_timing_stats `timing_stats`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats] + .Properties of `timing_stats` [%collapsible%open] ======= `elapsed_time`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-elapsed] `iteration_time`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-iteration] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-timing-stats-iteration] ======= //End reg_timing_stats //Begin reg_validation_loss `validation_loss`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss] + .Properties of `validation_loss`
[%collapsible%open] ======= `fold_values`:::: (array of strings) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-fold] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-fold] `loss_type`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-validation-loss-type] ======= //End reg_validation_loss ====== @@ -463,15 +463,15 @@ available only for running jobs. ===== `attributes`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes] `ephemeral_id`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] `id`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-id] `name`:::: (string) @@ -479,7 +479,7 @@ The node name. `transport_address`:::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] ===== `progress`::: diff --git a/docs/reference/ml/df-analytics/apis/get-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/get-dfanalytics.asciidoc index 7154dd607e771..c2a4caa981da1 100644 --- a/docs/reference/ml/df-analytics/apis/get-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/get-dfanalytics.asciidoc @@ -39,7 +39,7 @@ by using a comma-separated list of {dfanalytics-jobs} or a wildcard expression. ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-default] + -- You can get information for all {dfanalytics-jobs} by using _all, by specifying @@ -53,19 +53,19 @@ You can get information for all {dfanalytics-jobs} by using _all, by specifying `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] `exclude_generated`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] `from`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=from] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=from] `size`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=size] [role="child_attributes"] [[ml-get-dfanalytics-results]] diff --git a/docs/reference/ml/df-analytics/apis/preview-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/preview-dfanalytics.asciidoc index 5e67a0cf78328..2c61c3263992c 100644 --- a/docs/reference/ml/df-analytics/apis/preview-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/preview-dfanalytics.asciidoc @@ -41,7 +41,7 @@ that either exists already or one that has not been created yet. 
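As a minimal sketch of previewing a configuration that has not been created yet (the index name and analysis settings are illustrative):

[source,console]
----
POST _ml/data_frame/analytics/_preview
{
  "config": {
    "source": {
      "index": "houses"
    },
    "analysis": {
      "regression": {
        "dependent_variable": "price"
      }
    }
  }
}
----

For a configuration that already exists, `GET _ml/data_frame/analytics/<data_frame_analytics_id>/_preview` previews the stored job instead.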
``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics] [[ml-preview-dfanalytics-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc index f11166f9c1a60..54cbe78b34452 100644 --- a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc @@ -51,7 +51,7 @@ determines a value for each of the undefined parameters. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] [role="child_attributes"] [[ml-put-dfanalytics-request-body]] @@ -93,16 +93,16 @@ understand the function of these parameters. ===== `alpha`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] `class_assignment_objective`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=class-assignment-objective] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=class-assignment-objective] `dependent_variable`:::: (Required, string) + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dependent-variable] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dependent-variable] + The data type of the field must be numeric (`integer`, `short`, `long`, `byte`), categorical (`ip` or `keyword`), or boolean. There must be no more than 100 @@ -110,148 +110,148 @@ different values in this field. `downsample_factor`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] `early_stopping_enabled`:::: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-early-stopping-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-early-stopping-enabled] `eta`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=eta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=eta] `eta_growth_rate_per_tree`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] `feature_bag_fraction`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] `feature_processors`:::: (Optional, list) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors] + .Properties of `feature_processors` [%collapsible%open] ====== `frequency_encoding`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-frequency] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-frequency] + .Properties of `frequency_encoding` [%collapsible%open] ======= `feature_name`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-feat-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-feat-name] `field`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] `frequency_map`:::: (Required, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-frequency-map] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-frequency-map] ======= `multi_encoding`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-multi] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-multi] + .Properties of `multi_encoding` [%collapsible%open] ======= `processors`:::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-multi-proc] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-multi-proc] ======= `n_gram_encoding`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram] + .Properties of `n_gram_encoding` [%collapsible%open] ======= `feature_prefix`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-feat-pref] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-feat-pref] `field`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-field] `length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-length] `n_grams`:::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-ngrams] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-ngrams] `start`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-start] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-ngram-start] ======= `one_hot_encoding`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-one-hot] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-one-hot] + .Properties of `one_hot_encoding` [%collapsible%open] ======= `field`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] `hot_map`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-one-hot-map] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-one-hot-map] ======= `target_mean_encoding`:::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean] + .Properties of `target_mean_encoding` [%collapsible%open] ======= `default_value`:::: (Required, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean-default] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean-default] `feature_name`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-feat-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-feat-name] `field`:::: (Required, string) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-field] `target_map`:::: (Required, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean-map] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors-target-mean-map] ======= ====== `gamma`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=gamma] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=gamma] `lambda`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=lambda] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=lambda] `max_optimization_rounds_per_hyperparameter`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] `max_trees`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-trees] `num_top_classes`:::: (Optional, integer) @@ -276,23 +276,23 @@ By default, it is zero and no {feat-imp} calculation occurs. `prediction_field_name`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=prediction-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=prediction-field-name] `randomize_seed`:::: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=randomize-seed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=randomize-seed] `soft_tree_depth_limit`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] `soft_tree_depth_tolerance`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] `training_percent`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=training-percent] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=training-percent] //End classification ===== //Begin outlier_detection @@ -306,27 +306,27 @@ The configuration information necessary to perform ===== `compute_feature_influence`:::: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=compute-feature-influence] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=compute-feature-influence] `feature_influence_threshold`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-influence-threshold] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-influence-threshold] `method`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=method] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=method] `n_neighbors`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=n-neighbors] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=n-neighbors] `outlier_fraction`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=outlier-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=outlier-fraction] `standardization_enabled`:::: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=standardization-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=standardization-enabled] //End outlier_detection ===== //Begin regression @@ -345,46 +345,46 @@ understand the function of these parameters. 
===== `alpha`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-alpha] `dependent_variable`:::: (Required, string) + -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dependent-variable] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dependent-variable] + The data type of the field must be numeric. `downsample_factor`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-downsample-factor] `early_stopping_enabled`:::: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-early-stopping-enabled] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-early-stopping-enabled] `eta`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=eta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=eta] `eta_growth_rate_per_tree`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-eta-growth] `feature_bag_fraction`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] `feature_processors`:::: (Optional, list) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-feature-processors] `gamma`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=gamma] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=gamma] `lambda`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=lambda] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=lambda] `loss_function`:::: (Optional, string) @@ -401,11 +401,11 @@ A positive number that is used as a parameter to the `loss_function`. `max_optimization_rounds_per_hyperparameter`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-max-optimization-rounds] `max_trees`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-trees] `num_top_feature_importance_values`:::: (Optional, integer) @@ -415,23 +415,23 @@ By default, it is zero and no {feat-imp} calculation occurs. `prediction_field_name`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=prediction-field-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=prediction-field-name] `randomize_seed`:::: (Optional, long) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=randomize-seed] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=randomize-seed] `soft_tree_depth_limit`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-limit] `soft_tree_depth_tolerance`:::: (Optional, double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dfas-soft-tolerance] `training_percent`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=training-percent] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=training-percent] ===== //End regression ==== @@ -493,11 +493,11 @@ analysis. 
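Pulling the properties documented in this section together, a minimal sketch of a create request (the job name, index names, and values are illustrative):

[source,console]
----
PUT _ml/data_frame/analytics/my-regression-job
{
  "description": "Predict house prices",
  "source": {
    "index": "houses"
  },
  "dest": {
    "index": "houses-predictions"
  },
  "analysis": {
    "regression": {
      "dependent_variable": "price",
      "training_percent": 75
    }
  },
  "model_memory_limit": "200mb"
}
----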
`description`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=description-dfa] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=description-dfa] `dest`:: (Required, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=dest] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=dest] `max_num_threads`:: (Optional, integer) @@ -509,11 +509,11 @@ functionality other than the analysis itself. `_meta`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=meta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=meta] `model_memory_limit`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-dfa] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-dfa] `source`:: (object) diff --git a/docs/reference/ml/df-analytics/apis/start-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/start-dfanalytics.asciidoc index 8ed1fab202068..70e996ef8dd0a 100644 --- a/docs/reference/ml/df-analytics/apis/start-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/start-dfanalytics.asciidoc @@ -53,14 +53,14 @@ when you created the job, those credentials are used. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] [[ml-start-dfanalytics-query-params]] == {api-query-parms-title} `timeout`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=timeout-start] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=timeout-start] [[ml-start-dfanalytics-response-body]] == {api-response-body-title} diff --git a/docs/reference/ml/df-analytics/apis/stop-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/stop-dfanalytics.asciidoc index 3ac7be860fd1c..2fa3bc4413d7a 100644 --- a/docs/reference/ml/df-analytics/apis/stop-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/stop-dfanalytics.asciidoc @@ -42,14 +42,14 @@ stop all {dfanalytics-job} by using _all or by specifying * as the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] [[ml-stop-dfanalytics-query-params]] == {api-query-parms-title} `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] `force`:: @@ -57,7 +57,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-dfa-jobs] `timeout`:: (Optional, <>) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=timeout-stop] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=timeout-stop] [[ml-stop-dfanalytics-example]] diff --git a/docs/reference/ml/df-analytics/apis/update-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/update-dfanalytics.asciidoc index f78b0516a6214..49cca176be69b 100644 --- a/docs/reference/ml/df-analytics/apis/update-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/update-dfanalytics.asciidoc @@ -44,7 +44,7 @@ indices and stores the outcome in a destination index. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=job-id-data-frame-analytics-define] [role="child_attributes"] [[ml-update-dfanalytics-request-body]] @@ -62,7 +62,7 @@ the `starting` state until sufficient {ml} node capacity is available. 
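As a minimal sketch of an update that touches the properties documented below, reusing the hypothetical `my-regression-job` from the create example:

[source,console]
----
POST _ml/data_frame/analytics/my-regression-job/_update
{
  "description": "Predict house prices (tuned)",
  "model_memory_limit": "250mb"
}
----

Only the mutable properties listed in this section can appear in the update body; the analysis itself cannot be changed after creation.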
`description`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=description-dfa] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=description-dfa] `max_num_threads`:: (Optional, integer) @@ -74,11 +74,11 @@ functionality other than the analysis itself. `_meta`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=meta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=meta] `model_memory_limit`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-dfa] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-memory-limit-dfa] [[ml-update-dfanalytics-example]] == {api-examples-title} diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index cc6d9037bd59a..6bbc98db1c2e1 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -234,15 +234,15 @@ is an object it has the following properties: ===== `char_filter`:::: (array of strings or objects) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=char-filter] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=char-filter] `tokenizer`:::: (string or object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=tokenizer] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=tokenizer] `filter`:::: (array of strings or objects) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=filter] ===== end::categorization-analyzer[] @@ -448,7 +448,7 @@ it is not stored in {es}. Only the results for {anomaly-detect} are retained. `time_format`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=time-format] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=time-format] ==== end::data-description[] diff --git a/docs/reference/ml/trained-models/apis/clear-trained-model-deployment-cache.asciidoc b/docs/reference/ml/trained-models/apis/clear-trained-model-deployment-cache.asciidoc index 642afc520e6aa..f24379705fc75 100644 --- a/docs/reference/ml/trained-models/apis/clear-trained-model-deployment-cache.asciidoc +++ b/docs/reference/ml/trained-models/apis/clear-trained-model-deployment-cache.asciidoc @@ -31,7 +31,7 @@ node. Calling this API clears the caches without restarting the deployment. `deployment_id`:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] [[clear-trained-model-deployment-cache-example]] == {api-examples-title} diff --git a/docs/reference/ml/trained-models/apis/delete-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/delete-trained-models.asciidoc index bba39d876ebdc..1b54343d1f1c9 100644 --- a/docs/reference/ml/trained-models/apis/delete-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/delete-trained-models.asciidoc @@ -27,7 +27,7 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] [[ml-delete-trained-models-query-parms]] == {api-query-parms-title} diff --git a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc index aec997855902e..beff87e6ec6e6 100644 --- a/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc +++ b/docs/reference/ml/trained-models/apis/get-trained-models-stats.asciidoc @@ -53,15 +53,15 @@ model. 
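As a minimal sketch of a stats request that pages through all models with the `from` and `size` parameters documented below:

[source,console]
----
GET _ml/trained_models/_stats?from=0&size=100
----

Omitting the model identifier returns statistics for all trained models; a comma-separated list or a wildcard expression narrows the scope.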
`allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-models] `from`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=from-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=from-models] `size`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=size-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=size-models] [role="child_attributes"] [[ml-get-trained-models-stats-results]] @@ -120,7 +120,7 @@ The desired number of nodes for model allocation. ====== `deployment_id`::: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] `error_count`::: (integer) @@ -132,7 +132,7 @@ The sum of `inference_count` for all nodes in the deployment. `model_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] `nodes`::: (array of objects) @@ -190,22 +190,22 @@ Information pertaining to the node. ======== `attributes`::: (object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-attributes] `ephemeral_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id] `id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-id] `name`::: (string) The node name. `transport_address`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] ======== `number_of_allocations`::: @@ -365,7 +365,7 @@ section in <>. `model_id`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] `model_size_stats`::: (object) diff --git a/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc index 648595a83e8f9..f203578c6f1c7 100644 --- a/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/get-trained-models.asciidoc @@ -38,7 +38,7 @@ Requires the `monitor_ml` cluster privilege. This privilege is included in the ``:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] + You can get information for multiple trained models in a single API request by using a comma-separated list of model IDs or a wildcard expression. @@ -48,7 +48,7 @@ using a comma-separated list of model IDs or a wildcard expression. 
`allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-models] `decompress_definition`:: (Optional, Boolean) @@ -57,11 +57,11 @@ Specifies whether the included model definition should be returned as a JSON map `exclude_generated`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=exclude-generated] `from`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=from-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=from-models] `include`:: (Optional, string) @@ -83,11 +83,11 @@ in the response body. `size`:: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=size-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=size-models] `tags`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=tags] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=tags] [role="child_attributes"] [[ml-get-trained-models-results]] @@ -148,157 +148,157 @@ Classification configuration for inference. ====== `num_top_classes`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `num_top_feature_importance_values`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] `prediction_field_type`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] `results_field`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `top_classes_results_field`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] ====== `fill_mask`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] + .Properties of fill_mask inference [%collapsible%open] ====== `mask_token`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-mask-token] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-mask-token] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -311,7 +311,7 @@ The index where the vocabulary is stored. `ner`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] + .Properties of ner inference [%collapsible%open] @@ -325,126 +325,126 @@ and miscellaneous. 
For example: `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -457,133 +457,133 @@ The index where the vocabulary is stored `pass_through`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] + .Properties of pass_through inference [%collapsible%open] ====== `tokenization`:::: (Optional, object) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -603,15 +603,15 @@ Regression configuration for inference. 
====== `num_top_feature_importance_values`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] `results_field`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] ====== `text_classification`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] + .Properties of text_classification inference [%collapsible%open] @@ -626,147 +626,147 @@ Specifies the number of top class predictions to return. Defaults to all classes `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja 
[%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -778,141 +778,141 @@ The index where the vocabulary is stored. ====== `text_embedding`:::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] + .Properties of text_embedding inference [%collapsible%open] ====== `embedding_size`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] 
`with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -924,157 +924,157 @@ The index where the vocabulary is stored. 
====== `text_similarity`:::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] + .Properties of text_similarity inference [%collapsible%open] ====== `span_score_combination_function`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -1086,149 +1086,149 @@ The index where the vocabulary is stored. ====== `zero_shot_classification`:::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] + .Properties of zero_shot_classification inference [%collapsible%open] ====== `classification_labels`:::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-classification-labels] `hypothesis_template`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-hypothesis-template] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-hypothesis-template] `labels`:::: (Optional, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] `multi_label`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ======= `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======== `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======== `add_prefix_space`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ======== `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======== `max_sequence_length`:::: (Optional, integer) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ======== `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======== `do_lower_case`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======== ======= `vocabulary`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-vocabulary] + .Properties of vocabulary [%collapsible%open] @@ -1307,7 +1307,7 @@ hyperparameter optimization. `max_trees`:::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=max-trees-trained-models] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=max-trees-trained-models] `name`:::: (string) @@ -1344,7 +1344,7 @@ request parameter. 
`feature_name`::: (string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-feature-name] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-feature-name] `importance`::: (object) @@ -1355,15 +1355,15 @@ A collection of {feat-imp} statistics related to the training data set for this ======= `mean_magnitude`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-magnitude] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-magnitude] `max`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-max] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-max] `min`::: (integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-min] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-min] ======= @@ -1390,15 +1390,15 @@ A collection of {feat-imp} statistics related to the training data set for this ======== `mean_magnitude`::: (double) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-magnitude] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-magnitude] `max`::: (int) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-max] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-max] `min`::: (int) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-min] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-metadata-feature-importance-min] ======== diff --git a/docs/reference/ml/trained-models/apis/infer-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/infer-trained-model-deployment.asciidoc index d92d74d894a33..83bc56d18df63 100644 --- a/docs/reference/ml/trained-models/apis/infer-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/infer-trained-model-deployment.asciidoc @@ -31,7 +31,7 @@ deprecated::[8.3.0,Replaced by <<infer-trained-model>>.] `<model_id>`:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] [[infer-trained-model-deployment-query-params]] == {api-query-parms-title} diff --git a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc index d7201fcf42c0a..9aac913e7559f 100644 --- a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc +++ b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc @@ -34,14 +34,14 @@ directly from the {infer} cache. `<model_id>`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id-or-alias] If you specify the `model_id` in the API call, and the model has multiple deployments, a random deployment will be used. If the `model_id` matches the ID of one of the deployments, that deployment will be used. `<deployment_id>`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] [[infer-trained-model-query-params]] == {api-query-parms-title} @@ -84,28 +84,28 @@ Classification configuration for inference.
===== `num_top_classes`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `num_top_feature_importance_values`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] `prediction_field_type`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `top_classes_results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] ===== `fill_mask`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] + .Properties of fill_mask inference [%collapsible%open] @@ -117,228 +117,228 @@ to `0`. `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== `ner`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] + .Properties of ner inference [%collapsible%open] ===== `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== `pass_through`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] + .Properties of pass_through inference [%collapsible%open] ===== `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== `question_answering`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering] + .Properties of question_answering inference [%collapsible%open] @@ -358,11 +358,11 @@ The question to use when extracting an answer `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Recommended to set `max_sequence_length` to `386` with `128` of `span` and set `truncate` to `none`. @@ -372,78 +372,78 @@ Recommended to set `max_sequence_length` to `386` with `128` of `span` and set ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== @@ -457,16 +457,16 @@ Regression configuration for inference. ===== `num_top_feature_importance_values`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] ===== `text_classification`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] + .Properties of text_classification inference [%collapsible%open] @@ -481,346 +481,346 @@ Specifies the number of top class predictions to return. 
Defaults to all classes `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + 
.Properties of bert_ja [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== `text_embedding`::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] + .Properties of text_embedding inference [%collapsible%open] ===== `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `truncate`:::: (Optional, string) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== `text_similarity`::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] + .Properties of text_similarity inference [%collapsible%open] ===== `span_score_combination_function`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] `text`:::: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-text] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-text] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `span`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `with_special_tokens`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ======= ====== ===== `zero_shot_classification`::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] + .Properties of zero_shot_classification inference [%collapsible%open] ===== `labels`:::: (Optional, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] `multi_label`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + .Properties of tokenization [%collapsible%open] ====== `bert`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `mpnet`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `xlm_roberta`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= `bert_ja`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ======= `truncate`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= ====== ===== diff --git a/docs/reference/ml/trained-models/apis/put-trained-model-definition-part.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-model-definition-part.asciidoc index 53cad5cffa377..d1da29abffcd3 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-model-definition-part.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-model-definition-part.asciidoc @@ -26,7 +26,7 @@ Requires the `manage_ml` cluster privilege. This privilege is included in the ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] ``:: (Required, number) diff --git a/docs/reference/ml/trained-models/apis/put-trained-model-vocabulary.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-model-vocabulary.asciidoc index 8541583f368fc..2fdf86259388f 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-model-vocabulary.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-model-vocabulary.asciidoc @@ -34,7 +34,7 @@ The vocabulary is stored in the index as described in ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] [[ml-put-trained-model-vocabulary-request-body]] == {api-request-body-title} diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 433286a8e0c2e..eef90630eb35b 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -40,7 +40,7 @@ created by {dfanalytics}. 
``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] [[ml-put-trained-models-query-params]] == {api-query-parms-title} @@ -104,7 +104,7 @@ The field name to encode. Object that maps the field value to the frequency encoded value. `custom`:: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] ====== //End frequency encoding @@ -126,7 +126,7 @@ The field name to encode. String map of "field_value: one_hot_column_name". `custom`:: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] ====== //End one hot encoding @@ -156,7 +156,7 @@ The field name to encode. Object that maps the field value to the target mean value. `custom`:: -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=custom-preprocessor] ====== //End target mean encoding @@ -409,28 +409,28 @@ Classification configuration for inference. ===== `num_top_classes`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-classes] `num_top_feature_importance_values`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-num-top-feature-importance-values] `prediction_field_type`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-prediction-field-type] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `top_classes_results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-classification-top-classes-results-field] ===== `fill_mask`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-fill-mask] + .Properties of fill_mask inference [%collapsible%open] @@ -441,11 +441,11 @@ Number of top predicted tokens to return for replacing the mask token. Defaults `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. 
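To ground the `classification` options documented in the hunks above, here is a minimal sketch of a create request that sets them explicitly. It is illustrative only: the model ID `my-classifier`, the input field names, and all values are assumptions, and the model `definition` a real request also needs is omitted for brevity.

[source,console]
----
PUT _ml/trained_models/my-classifier
{
  "input": {
    "field_names": ["sepal_length", "sepal_width"]  // hypothetical input fields
  },
  "inference_config": {
    "classification": {
      "num_top_classes": 2,
      "num_top_feature_importance_values": 2,
      "prediction_field_type": "string",
      "results_field": "predicted_species",
      "top_classes_results_field": "top_species"
    }
  }
}
----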
@@ -453,7 +453,7 @@ Refer to <> to review the properties of the `ner`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-ner] + .Properties of ner inference [%collapsible%open] @@ -467,11 +467,11 @@ Example: ["O", "B-PER", "I-PER", "B-ORG", "I-ORG", "B-LOC", "I-LOC", "B-MISC", `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. @@ -479,18 +479,18 @@ properties of the `tokenization` object. `pass_through`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-pass-through] + .Properties of pass_through inference [%collapsible%open] ===== `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. @@ -498,7 +498,7 @@ Refer to <> to review the properties of the `question_answering`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-question-answering] + .Properties of question_answering inference [%collapsible%open] @@ -509,11 +509,11 @@ The maximum number of words in the answer. Defaults to `15`. `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Recommended to set `max_sequence_length` to `386` with `128` of `span` and set `truncate` to `none`. @@ -531,16 +531,16 @@ Regression configuration for inference. ===== `num_top_feature_importance_values`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-regression-num-top-feature-importance-values] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] ===== `text_classification`::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-classification] + .Properties of text_classification inference [%collapsible%open] @@ -555,11 +555,11 @@ Specifies the number of top class predictions to return.
Defaults to all classes `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. @@ -567,22 +567,22 @@ Refer to <> to review the properties of the `text_embedding`::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding] + .Properties of text_embedding inference [%collapsible%open] ===== `embedding_size`:::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-embedding-size] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. @@ -590,18 +590,18 @@ Refer to <> to review the properties of the `text_similarity`:::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity] + .Properties of text_similarity inference [%collapsible%open] ===== `span_score_combination_function`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-text-similarity-span-score-func] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. 
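As a hedged illustration of the `text_embedding` options in the hunk above, a create request might combine them as follows. The model ID, the input field name, and the `embedding_size` of `384` are assumptions chosen only for the sketch:

[source,console]
----
PUT _ml/trained_models/my-embedding-model
{
  "model_type": "pytorch",
  "input": {
    "field_names": ["text_field"]
  },
  "inference_config": {
    "text_embedding": {
      "embedding_size": 384,  // should match the deployed model's output size
      "results_field": "text_embedding"
    }
  }
}
----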
@@ -609,34 +609,34 @@ Refer to <> to review the properties of the `zero_shot_classification`::: (Object, optional) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification] + .Properties of zero_shot_classification inference [%collapsible%open] ===== `classification_labels`:::: (Required, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-classification-labels] `hypothesis_template`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-hypothesis-template] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-hypothesis-template] `labels`:::: (Optional, array) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-labels] `multi_label`:::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-zero-shot-classification-multi-label] `results_field`:::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-results-field] `tokenization`:::: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization] + Refer to <> to review the properties of the `tokenization` object. @@ -748,134 +748,134 @@ The `tokenization` object has the following properties. 
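Before the per-tokenizer property lists that follow, a sketch of a fully populated `tokenization` object may help orient the reader. All values here are illustrative assumptions rather than defaults asserted by this patch:

[source,js]
----
"tokenization": {
  "bert": {
    "do_lower_case": false,
    "max_sequence_length": 512,
    "span": -1,                  // -1 disables windowing over long input
    "truncate": "first",
    "with_special_tokens": true
  }
}
----
// NOTCONSOLE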
`bert`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert] + .Properties of bert [%collapsible%open] ==== `do_lower_case`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] ==== `roberta`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] + .Properties of roberta [%collapsible%open] ==== `add_prefix_space`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-add-prefix-space] `max_sequence_length`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ==== `mpnet`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet] + .Properties of mpnet [%collapsible%open] ==== `do_lower_case`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`::: (Optional, integer) 
-include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-mpnet-with-special-tokens] ==== `xlm_roberta`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-xlm-roberta] + .Properties of xlm_roberta [%collapsible%open] ==== `max_sequence_length`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta-with-special-tokens] ==== `bert_ja`:: (Optional, object) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja] + .Properties of bert_ja [%collapsible%open] ==== `do_lower_case`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-do-lower-case] `max_sequence_length`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-max-sequence-length] `span`::: (Optional, integer) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] `truncate`::: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] `with_special_tokens`::: (Optional, boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-ja-with-special-tokens] ==== diff --git a/docs/reference/ml/trained-models/apis/start-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/start-trained-model-deployment.asciidoc index 5771579cfaf30..50754ac554439 100644 --- a/docs/reference/ml/trained-models/apis/start-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/start-trained-model-deployment.asciidoc @@ -53,7 +53,7 @@ the node's allocated processors. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=model-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=model-id] [[start-trained-model-deployment-query-params]] == {api-query-parms-title} @@ -67,7 +67,7 @@ cache, `0b` can be provided. `deployment_id`:: (Optional, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] Defaults to `model_id`. `number_of_allocations`:: diff --git a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc index 335d76ffcd56a..622b440622cd3 100644 --- a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc @@ -29,7 +29,7 @@ Deployment is required only for trained models that have a PyTorch `model_type`. ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] [[stop-trained-model-deployment-query-params]] @@ -37,7 +37,7 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] `allow_no_match`:: (Optional, Boolean) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-deployments] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=allow-no-match-deployments] `force`:: (Optional, Boolean) If true, the deployment is stopped even if it or one of its diff --git a/docs/reference/ml/trained-models/apis/update-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/update-trained-model-deployment.asciidoc index 1b5f94cceeaf9..93547c1d3e9b7 100644 --- a/docs/reference/ml/trained-models/apis/update-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/update-trained-model-deployment.asciidoc @@ -34,7 +34,7 @@ You can either increase or decrease the number of allocations of such a deployme ``:: (Required, string) -include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=deployment-id] +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=deployment-id] [[update-trained-model-deployment-request-body]] == {api-request-body-title} diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc index 7eb1cf357498f..3da5df4f16414 100644 --- a/docs/reference/modules/cluster/misc.asciidoc +++ b/docs/reference/modules/cluster/misc.asciidoc @@ -163,7 +163,7 @@ increase it if you expect nodes to be absent from the cluster and miss more than 500 deletes. We think that is rare, thus the default. Tombstones don't take up much space, but we also think that a number like 50,000 is probably too big. -include::{es-repo-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] +include::{es-ref-dir}/indices/dangling-indices-list.asciidoc[tag=dangling-index-description] You can use the <> to manage this situation. 
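As a concrete follow-on to the dangling-indices note above, the management flow is to list the dangling indices and then explicitly import or delete each one by UUID. A hedged sketch, where the UUID is a hypothetical placeholder:

[source,console]
----
GET /_dangling

// import one dangling index, explicitly accepting possible data loss
POST /_dangling/zmM4e0JtBkeUjiHD-MihPQ?accept_data_loss=true
----

The `accept_data_loss=true` flag is required because the cluster cannot verify that the imported copy is complete and current.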
diff --git a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc index 0cac52deaae4b..506e834e0b1c1 100644 --- a/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-api-key.asciidoc @@ -198,4 +198,4 @@ Configuring the `remote_cluster_client` settings in `elasticsearch.yml` still re include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::{es-repo-dir}/security/authentication/remote-clusters-privileges-api-key.asciidoc[leveloffset=+1] +include::{es-ref-dir}/security/authentication/remote-clusters-privileges-api-key.asciidoc[leveloffset=+1] diff --git a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc index 11d71955cfe60..6602c807f5b64 100644 --- a/docs/reference/modules/cluster/remote-clusters-cert.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-cert.asciidoc @@ -80,4 +80,4 @@ generate certificates for all nodes simplifies this task. include::remote-clusters-connect.asciidoc[] :!trust-mechanism: -include::{es-repo-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file +include::{es-ref-dir}/security/authentication/remote-clusters-privileges-cert.asciidoc[leveloffset=+1] \ No newline at end of file diff --git a/docs/reference/modules/indices/index_management.asciidoc b/docs/reference/modules/indices/index_management.asciidoc index cdb8af570c6d4..5f7274b2271d0 100644 --- a/docs/reference/modules/indices/index_management.asciidoc +++ b/docs/reference/modules/indices/index_management.asciidoc @@ -46,9 +46,9 @@ to `true`. This setting affects the following built-in index templates: -include::{es-repo-dir}/indices/index-templates.asciidoc[tag=built-in-index-template-patterns] +include::{es-ref-dir}/indices/index-templates.asciidoc[tag=built-in-index-template-patterns] This setting also affects the following built-in component templates: -include::{es-repo-dir}/indices/put-component-template.asciidoc[tag=built-in-component-templates] +include::{es-ref-dir}/indices/put-component-template.asciidoc[tag=built-in-component-templates] -- diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 8a42d11f6367a..81df2cf4a2a6c 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -253,7 +253,7 @@ node.roles: [ data ] ===== Content data node Content data nodes are part of the content tier. -include::{es-repo-dir}/datatiers.asciidoc[tag=content-tier] +include::{es-ref-dir}/datatiers.asciidoc[tag=content-tier] To create a dedicated content node, set: [source,yaml] @@ -265,7 +265,7 @@ node.roles: [ data_content ] ===== Hot data node Hot data nodes are part of the hot tier. -include::{es-repo-dir}/datatiers.asciidoc[tag=hot-tier] +include::{es-ref-dir}/datatiers.asciidoc[tag=hot-tier] To create a dedicated hot node, set: [source,yaml] @@ -277,7 +277,7 @@ node.roles: [ data_hot ] ===== Warm data node Warm data nodes are part of the warm tier. -include::{es-repo-dir}/datatiers.asciidoc[tag=warm-tier] +include::{es-ref-dir}/datatiers.asciidoc[tag=warm-tier] To create a dedicated warm node, set: [source,yaml] @@ -289,7 +289,7 @@ node.roles: [ data_warm ] ===== Cold data node Cold data nodes are part of the cold tier. 
-include::{es-repo-dir}/datatiers.asciidoc[tag=cold-tier] +include::{es-ref-dir}/datatiers.asciidoc[tag=cold-tier] To create a dedicated cold node, set: [source,yaml] @@ -301,7 +301,7 @@ node.roles: [ data_cold ] ===== Frozen data node Frozen data nodes are part of the frozen tier. -include::{es-repo-dir}/datatiers.asciidoc[tag=frozen-tier] +include::{es-ref-dir}/datatiers.asciidoc[tag=frozen-tier] To create a dedicated frozen node, set: [source,yaml] diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc index 56f90c7d5da01..b7ded6929ed21 100644 --- a/docs/reference/query-dsl/knn-query.asciidoc +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -96,7 +96,7 @@ Either this or `query_vector_builder` must be provided. + -- (Optional, object) Query vector builder. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector-builder] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector-builder] -- diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index 08522708554f9..9291b8c15f0d1 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -335,7 +335,7 @@ through a script: The `script_score` query has equivalent <> that can be used in scripts. -include::{es-repo-dir}/vectors/vector-functions.asciidoc[] +include::{es-ref-dir}/vectors/vector-functions.asciidoc[] [[score-explanation]] ===== Explain request diff --git a/docs/reference/repositories-metering-api/apis/clear-repositories-metering-archive.asciidoc b/docs/reference/repositories-metering-api/apis/clear-repositories-metering-archive.asciidoc index 991b015f63568..addcd65f0e84a 100644 --- a/docs/reference/repositories-metering-api/apis/clear-repositories-metering-archive.asciidoc +++ b/docs/reference/repositories-metering-api/apis/clear-repositories-metering-archive.asciidoc @@ -26,7 +26,7 @@ You can use this API to clear the archived repositories metering information in [[clear-repositories-metering-archive-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] ``:: (long) Specifies the maximum <> to be cleared from the archive. @@ -37,4 +37,4 @@ All the nodes selective options are explained <>. ==== {api-response-body-title} Returns the deleted archived repositories metering information. -include::{es-repo-dir}/repositories-metering-api/apis/repositories-meterings-body.asciidoc[tag=repositories-metering-body] +include::{es-ref-dir}/repositories-metering-api/apis/repositories-meterings-body.asciidoc[tag=repositories-metering-body] diff --git a/docs/reference/repositories-metering-api/apis/get-repositories-metering.asciidoc b/docs/reference/repositories-metering-api/apis/get-repositories-metering.asciidoc index 4e7eb876ad725..314f85a7dba52 100644 --- a/docs/reference/repositories-metering-api/apis/get-repositories-metering.asciidoc +++ b/docs/reference/repositories-metering-api/apis/get-repositories-metering.asciidoc @@ -30,11 +30,11 @@ exposed by this API is volatile, meaning that it won't be present after node res [[get-repositories-metering-api-path-params]] ==== {api-path-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=node-id] All the nodes selective options are explained <>. 
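The two repositories metering endpoints touched in these hunks pair a read with a cleanup. A hedged sketch of both calls, where the `_all` node selector and the archive version `1` are illustrative values:

[source,console]
----
// fetch repositories metering information from all nodes
GET /_nodes/_all/_repositories_metering

// clear archived metering information up to and including archive version 1
DELETE /_nodes/_all/_repositories_metering/1
----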
[role="child_attributes"] [[get-repositories-metering-api-response-body]] ==== {api-response-body-title} -include::{es-repo-dir}/repositories-metering-api/apis/repositories-meterings-body.asciidoc[tag=repositories-metering-body] +include::{es-ref-dir}/repositories-metering-api/apis/repositories-meterings-body.asciidoc[tag=repositories-metering-body] diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 39d9e90079bff..e63f66217d8d7 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1180,7 +1180,7 @@ tag::target-index[] (Required, string) Name of the target index to create. -include::{es-repo-dir}/indices/create-index.asciidoc[tag=index-name-reqs] +include::{es-ref-dir}/indices/create-index.asciidoc[tag=index-name-reqs] -- end::target-index[] diff --git a/docs/reference/rest-api/defs.asciidoc b/docs/reference/rest-api/defs.asciidoc index a80d4a42eb30d..52b05daaefd54 100644 --- a/docs/reference/rest-api/defs.asciidoc +++ b/docs/reference/rest-api/defs.asciidoc @@ -8,4 +8,4 @@ to {security-features}. * <> -include::{es-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[] +include::{es-ref-dir}/rest-api/security/role-mapping-resources.asciidoc[] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index fa0d3babb3a0c..50c9f96ad81b0 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -60,52 +60,52 @@ not be included yet. * <> -- -include::{es-repo-dir}/api-conventions.asciidoc[] -include::{es-repo-dir}/rest-api/common-options.asciidoc[] -include::{es-repo-dir}/rest-api/rest-api-compatibility.asciidoc[] -include::{es-repo-dir}/autoscaling/apis/autoscaling-apis.asciidoc[] -include::{es-repo-dir}/behavioral-analytics/apis/index.asciidoc[] -include::{es-repo-dir}/cat.asciidoc[] -include::{es-repo-dir}/cluster.asciidoc[] -include::{es-repo-dir}/ccr/apis/ccr-apis.asciidoc[] -include::{es-repo-dir}/connector/apis/connector-apis.asciidoc[] -include::{es-repo-dir}/data-streams/data-stream-apis.asciidoc[] -include::{es-repo-dir}/docs.asciidoc[] -include::{es-repo-dir}/ingest/apis/enrich/index.asciidoc[] -include::{es-repo-dir}/eql/eql-apis.asciidoc[] -include::{es-repo-dir}/esql/esql-apis.asciidoc[] -include::{es-repo-dir}/features/apis/features-apis.asciidoc[] -include::{es-repo-dir}/fleet/index.asciidoc[] -include::{es-repo-dir}/graph/explore.asciidoc[] -include::{es-repo-dir}/indices.asciidoc[] -include::{es-repo-dir}/ilm/apis/ilm-api.asciidoc[] -include::{es-repo-dir}/inference/inference-apis.asciidoc[] +include::{es-ref-dir}/api-conventions.asciidoc[] +include::{es-ref-dir}/rest-api/common-options.asciidoc[] +include::{es-ref-dir}/rest-api/rest-api-compatibility.asciidoc[] +include::{es-ref-dir}/autoscaling/apis/autoscaling-apis.asciidoc[] +include::{es-ref-dir}/behavioral-analytics/apis/index.asciidoc[] +include::{es-ref-dir}/cat.asciidoc[] +include::{es-ref-dir}/cluster.asciidoc[] +include::{es-ref-dir}/ccr/apis/ccr-apis.asciidoc[] +include::{es-ref-dir}/connector/apis/connector-apis.asciidoc[] +include::{es-ref-dir}/data-streams/data-stream-apis.asciidoc[] +include::{es-ref-dir}/docs.asciidoc[] +include::{es-ref-dir}/ingest/apis/enrich/index.asciidoc[] +include::{es-ref-dir}/eql/eql-apis.asciidoc[] +include::{es-ref-dir}/esql/esql-apis.asciidoc[] +include::{es-ref-dir}/features/apis/features-apis.asciidoc[] +include::{es-ref-dir}/fleet/index.asciidoc[] 
+include::{es-ref-dir}/graph/explore.asciidoc[] +include::{es-ref-dir}/indices.asciidoc[] +include::{es-ref-dir}/ilm/apis/ilm-api.asciidoc[] +include::{es-ref-dir}/inference/inference-apis.asciidoc[] include::info.asciidoc[] -include::{es-repo-dir}/ingest/apis/index.asciidoc[] -include::{es-repo-dir}/licensing/index.asciidoc[] -include::{es-repo-dir}/rest-api/logstash/index.asciidoc[] -include::{es-repo-dir}/ml/common/apis/index.asciidoc[] -include::{es-repo-dir}/ml/anomaly-detection/apis/index.asciidoc[] -include::{es-repo-dir}/ml/df-analytics/apis/index.asciidoc[] -include::{es-repo-dir}/ml/trained-models/apis/index.asciidoc[] -include::{es-repo-dir}/migration/migration.asciidoc[] -include::{es-repo-dir}/shutdown/apis/shutdown-api.asciidoc[] -include::{es-repo-dir}/query-rules/apis/index.asciidoc[] -include::{es-repo-dir}/indices/apis/reload-analyzers.asciidoc[] -include::{es-repo-dir}/repositories-metering-api/repositories-metering-apis.asciidoc[] -include::{es-repo-dir}/rollup/rollup-apis.asciidoc[] -include::{es-repo-dir}/rest-api/root.asciidoc[] -include::{es-repo-dir}/scripting/apis/script-apis.asciidoc[] -include::{es-repo-dir}/search.asciidoc[] -include::{es-repo-dir}/search-application/apis/index.asciidoc[] -include::{es-repo-dir}/searchable-snapshots/apis/searchable-snapshots-apis.asciidoc[] -include::{es-repo-dir}/rest-api/security.asciidoc[] -include::{es-repo-dir}/snapshot-restore/apis/snapshot-restore-apis.asciidoc[] -include::{es-repo-dir}/slm/apis/slm-api.asciidoc[] -include::{es-repo-dir}/sql/apis/sql-apis.asciidoc[] -include::{es-repo-dir}/synonyms/apis/synonyms-apis.asciidoc[] -include::{es-repo-dir}/text-structure/apis/index.asciidoc[] -include::{es-repo-dir}/transform/apis/index.asciidoc[] +include::{es-ref-dir}/ingest/apis/index.asciidoc[] +include::{es-ref-dir}/licensing/index.asciidoc[] +include::{es-ref-dir}/rest-api/logstash/index.asciidoc[] +include::{es-ref-dir}/ml/common/apis/index.asciidoc[] +include::{es-ref-dir}/ml/anomaly-detection/apis/index.asciidoc[] +include::{es-ref-dir}/ml/df-analytics/apis/index.asciidoc[] +include::{es-ref-dir}/ml/trained-models/apis/index.asciidoc[] +include::{es-ref-dir}/migration/migration.asciidoc[] +include::{es-ref-dir}/shutdown/apis/shutdown-api.asciidoc[] +include::{es-ref-dir}/query-rules/apis/index.asciidoc[] +include::{es-ref-dir}/indices/apis/reload-analyzers.asciidoc[] +include::{es-ref-dir}/repositories-metering-api/repositories-metering-apis.asciidoc[] +include::{es-ref-dir}/rollup/rollup-apis.asciidoc[] +include::{es-ref-dir}/rest-api/root.asciidoc[] +include::{es-ref-dir}/scripting/apis/script-apis.asciidoc[] +include::{es-ref-dir}/search.asciidoc[] +include::{es-ref-dir}/search-application/apis/index.asciidoc[] +include::{es-ref-dir}/searchable-snapshots/apis/searchable-snapshots-apis.asciidoc[] +include::{es-ref-dir}/rest-api/security.asciidoc[] +include::{es-ref-dir}/snapshot-restore/apis/snapshot-restore-apis.asciidoc[] +include::{es-ref-dir}/slm/apis/slm-api.asciidoc[] +include::{es-ref-dir}/sql/apis/sql-apis.asciidoc[] +include::{es-ref-dir}/synonyms/apis/synonyms-apis.asciidoc[] +include::{es-ref-dir}/text-structure/apis/index.asciidoc[] +include::{es-ref-dir}/transform/apis/index.asciidoc[] include::usage.asciidoc[] -include::{es-repo-dir}/rest-api/watcher.asciidoc[] +include::{es-ref-dir}/rest-api/watcher.asciidoc[] include::defs.asciidoc[] diff --git a/docs/reference/rest-api/security/disable-user-profile.asciidoc b/docs/reference/rest-api/security/disable-user-profile.asciidoc index 
a25b4a311aa81..35658f071679b 100644
--- a/docs/reference/rest-api/security/disable-user-profile.asciidoc
+++ b/docs/reference/rest-api/security/disable-user-profile.asciidoc
@@ -45,7 +45,7 @@ To re-enable a disabled user profile, use the
[[security-api-disable-user-profile-query-params]]
==== {api-query-parms-title}
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh]
diff --git a/docs/reference/rest-api/security/enable-user-profile.asciidoc b/docs/reference/rest-api/security/enable-user-profile.asciidoc
index c61e847cfd1f1..e27673b07f598 100644
--- a/docs/reference/rest-api/security/enable-user-profile.asciidoc
+++ b/docs/reference/rest-api/security/enable-user-profile.asciidoc
@@ -42,7 +42,7 @@ enable user profile API to make the profile visible in these searches again.
[[security-api-enable-user-profile-query-params]]
==== {api-query-parms-title}
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh]
[[security-api-enable-user-profile-example]]
==== {api-examples-title}
diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc
index ad4184ec34a29..5760968cc7e9a 100644
--- a/docs/reference/rest-api/security/query-api-key.asciidoc
+++ b/docs/reference/rest-api/security/query-api-key.asciidoc
@@ -242,7 +242,7 @@ This supports only a subset of aggregation types, namely: <>, and <>.
Additionally, aggregations only run over the same subset of fields that
`query` works with.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from]
+
By default, you cannot page through more than 10,000 hits using the `from` and
`size` parameters. To page through more hits, use the
diff --git a/docs/reference/rest-api/security/query-user.asciidoc b/docs/reference/rest-api/security/query-user.asciidoc
index 92c293d1fe593..d0c7b44faf3bd 100644
--- a/docs/reference/rest-api/security/query-user.asciidoc
+++ b/docs/reference/rest-api/security/query-user.asciidoc
@@ -70,7 +70,7 @@ Specifies whether the user is enabled.
(Optional, boolean) Determines whether to retrieve the <> `uid`,
if it exists, for the users.
Defaults to `false`.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from]
+
By default, you cannot page through more than 10,000 hits using the `from` and
`size` parameters. To page through more hits, use the
diff --git a/docs/reference/rest-api/security/update-user-profile-data.asciidoc b/docs/reference/rest-api/security/update-user-profile-data.asciidoc
index d4c89558c3fca..01fa5e11d10e8 100644
--- a/docs/reference/rest-api/security/update-user-profile-data.asciidoc
+++ b/docs/reference/rest-api/security/update-user-profile-data.asciidoc
@@ -48,11 +48,11 @@ the allowed namespaces.
[[security-api-update-user-profile-data-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] `uid`:: (Required, string) A unique identifier for the user profile. diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index bbbd73a0523f3..6bdfaab17a4d0 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -28,7 +28,7 @@ available under the current license and some usage statistics. [[usage-api-query-parms]] === {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [discrete] [[usage-api-example]] diff --git a/docs/reference/rest-api/watcher/start.asciidoc b/docs/reference/rest-api/watcher/start.asciidoc index 10811ac0b8617..565ef60160a9d 100644 --- a/docs/reference/rest-api/watcher/start.asciidoc +++ b/docs/reference/rest-api/watcher/start.asciidoc @@ -27,7 +27,7 @@ information, see <>. [[watcher-api-start-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] //[[watcher-api-start-request-body]] //==== {api-request-body-title} diff --git a/docs/reference/rest-api/watcher/stop.asciidoc b/docs/reference/rest-api/watcher/stop.asciidoc index c06090a3cd999..50acd6e9eb2d1 100644 --- a/docs/reference/rest-api/watcher/stop.asciidoc +++ b/docs/reference/rest-api/watcher/stop.asciidoc @@ -27,7 +27,7 @@ information, see <>. [[watcher-api-stop-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] //[[watcher-api-stop-request-body]] //==== {api-request-body-title} diff --git a/docs/reference/scripting/apis/create-stored-script-api.asciidoc b/docs/reference/scripting/apis/create-stored-script-api.asciidoc index 6108831a836b1..dab1314e65dc4 100644 --- a/docs/reference/scripting/apis/create-stored-script-api.asciidoc +++ b/docs/reference/scripting/apis/create-stored-script-api.asciidoc @@ -67,7 +67,7 @@ the API immediately compiles the script or template in this context. If you specify both this and the `` request path parameter, the API uses the request path parameter. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[create-stored-script-api-request-body]] diff --git a/docs/reference/scripting/apis/delete-stored-script-api.asciidoc b/docs/reference/scripting/apis/delete-stored-script-api.asciidoc index 038d1916f76fd..e233922c9a7d5 100644 --- a/docs/reference/scripting/apis/delete-stored-script-api.asciidoc +++ b/docs/reference/scripting/apis/delete-stored-script-api.asciidoc @@ -47,4 +47,4 @@ Identifier for the stored script or search template. 
[[delete-stored-script-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] diff --git a/docs/reference/scripting/apis/get-stored-script-api.asciidoc b/docs/reference/scripting/apis/get-stored-script-api.asciidoc index 6b6f6648a7ed6..fffeb24e0331f 100644 --- a/docs/reference/scripting/apis/get-stored-script-api.asciidoc +++ b/docs/reference/scripting/apis/get-stored-script-api.asciidoc @@ -55,4 +55,4 @@ Identifier for the stored script or search template. [[get-stored-script-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] diff --git a/docs/reference/scripting/engine.asciidoc b/docs/reference/scripting/engine.asciidoc index 54d85e6e8236f..ef062e8afab14 100644 --- a/docs/reference/scripting/engine.asciidoc +++ b/docs/reference/scripting/engine.asciidoc @@ -17,7 +17,7 @@ the document frequency of a provided term. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{plugins-examples-dir}/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java[expert_engine] +include-tagged::{elasticsearch-root}/plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java[expert_engine] -------------------------------------------------- You can execute the script by specifying its `lang` as `expert_scripts`, and the name diff --git a/docs/reference/search/count.asciidoc b/docs/reference/search/count.asciidoc index 152571a25ec17..399545adf8d1d 100644 --- a/docs/reference/search/count.asciidoc +++ b/docs/reference/search/count.asciidoc @@ -56,46 +56,46 @@ this parameter or use `*` or `_all`. [[search-count-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] `min_score`:: (Optional, float) Sets the minimum `_score` value that documents must have to be included in the result. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] [[search-count-request-body]] ==== {api-request-body-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=query] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=query] [[search-count-api-example]] diff --git a/docs/reference/search/explain.asciidoc b/docs/reference/search/explain.asciidoc index c9e297c93413b..77e2d5bd63efd 100644 --- a/docs/reference/search/explain.asciidoc +++ b/docs/reference/search/explain.asciidoc @@ -59,37 +59,37 @@ Only a single index name can be provided to this parameter. [[search-explain-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] `stored_fields`:: (Optional, string) A comma-separated list of stored fields to return in the response. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_includes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_includes] [[search-explain-api-request-body]] ==== {api-request-body-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=query] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=query] [[search-explain-api-example]] diff --git a/docs/reference/search/field-caps.asciidoc b/docs/reference/search/field-caps.asciidoc index 5fe924d38e028..2ff2b8d18604e 100644 --- a/docs/reference/search/field-caps.asciidoc +++ b/docs/reference/search/field-caps.asciidoc @@ -61,17 +61,17 @@ and indices, omit this parameter or use `*` or `_all`. Comma-separated list of fields to retrieve capabilities for. Wildcard (`*`) expressions are supported. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + -- Defaults to `open`. -- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] `include_unmapped`:: (Optional, Boolean) If `true`, unmapped fields that are mapped in one index but not in another are included in the response. Fields that don't have any mapping are never included. diff --git a/docs/reference/search/knn-search.asciidoc b/docs/reference/search/knn-search.asciidoc index 7947c688a807c..78e3e13b09fee 100644 --- a/docs/reference/search/knn-search.asciidoc +++ b/docs/reference/search/knn-search.asciidoc @@ -91,7 +91,7 @@ use `*` or `_all`. [[knn-search-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] [role="child_attributes"] [[knn-search-api-request-body]] @@ -99,36 +99,36 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] `filter`:: (Optional, <>) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] `knn`:: (Required, object) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn] + .Properties of `knn` object [%collapsible%open] ==== `field`:: (Required, string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-field] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-field] `k`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-k] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-k] `num_candidates`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-num-candidates] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-num-candidates] `query_vector`:: (Required, array of floats or string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector] ==== -include::{es-repo-dir}/search/search.asciidoc[tag=docvalue-fields-def] -include::{es-repo-dir}/search/search.asciidoc[tag=fields-param-def] -include::{es-repo-dir}/search/search.asciidoc[tag=source-filtering-def] -include::{es-repo-dir}/search/search.asciidoc[tag=stored-fields-def] +include::{es-ref-dir}/search/search.asciidoc[tag=docvalue-fields-def] +include::{es-ref-dir}/search/search.asciidoc[tag=fields-param-def] +include::{es-ref-dir}/search/search.asciidoc[tag=source-filtering-def] +include::{es-ref-dir}/search/search.asciidoc[tag=stored-fields-def] [role="child_attributes"] [[knn-search-api-response-body]] diff --git a/docs/reference/search/multi-search-template-api.asciidoc b/docs/reference/search/multi-search-template-api.asciidoc index a680795737ff4..c8eea52a6fd9b 100644 --- a/docs/reference/search/multi-search-template-api.asciidoc +++ b/docs/reference/search/multi-search-template-api.asciidoc @@ -89,7 +89,7 @@ min(<>, 10)))+. (Optional, Boolean) If `true`, the response returns `hits.total` as an integer. If false, it returns `hits.total` as an object. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search_type]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search_type]
`typed_keys`::
(Optional, Boolean) If `true`, the response prefixes aggregation and suggester
diff --git a/docs/reference/search/multi-search.asciidoc b/docs/reference/search/multi-search.asciidoc
index 90056d5036558..9cafa756f035e 100644
--- a/docs/reference/search/multi-search.asciidoc
+++ b/docs/reference/search/multi-search.asciidoc
@@ -72,7 +72,7 @@ in a cluster, omit this parameter or use `_all` or `*`.
[[search-multi-search-api-query-params]]
==== {api-query-parms-title}
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
`ccs_minimize_roundtrips`::
(Optional, Boolean)
@@ -80,13 +80,13 @@ If `true`, network roundtrips between the coordinating node and remote clusters
are minimized for {ccs} requests. Defaults to `true`. See
<>.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
Defaults to `open`.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled]
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
`max_concurrent_searches`::
(Optional, integer)
@@ -184,7 +184,7 @@ If `true`, the request does *not* return an error if a wildcard expression or
This parameter also applies to <> that point to a missing
or closed index.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
Defaults to `open`.
diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc
index e008b9a8aef80..05862ebbbcca5 100644
--- a/docs/reference/search/rank-eval.asciidoc
+++ b/docs/reference/search/rank-eval.asciidoc
@@ -78,17 +78,17 @@ and indices, omit this parameter or use `*` or `_all`.
[[search-rank-eval-api-query-params]]
==== {api-query-parms-title}
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+
Defaults to `true`.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
+
--
Defaults to `open`.
--
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
[[search-rank-eval-api-example]]
diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc
index a872f1866f42c..42d2129f0fdec 100644
--- a/docs/reference/search/retriever.asciidoc
+++ b/docs/reference/search/retriever.asciidoc
@@ -46,7 +46,7 @@ where all documents must match this query but do not contribute to the score.
+
Defines a search after object parameter used for pagination.
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] `sort`:: + @@ -97,12 +97,12 @@ A kNN retriever returns top documents from a <>) + -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] `similarity`:: (Optional, float) + -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] ===== Restrictions @@ -160,11 +160,11 @@ equally weighting two or more child retrievers. ===== Parameters -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-window-size] ===== Restrictions diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index 813d1f6557bed..96477cdee45f1 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -41,11 +41,11 @@ An RRF retriever is an optional object defined as part of a search request's <>. The RRF retriever object contains the following parameters: -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-rank-constant] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=rrf-window-size] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-window-size] An example request using RRF: diff --git a/docs/reference/search/search-shards.asciidoc b/docs/reference/search/search-shards.asciidoc index d0a518865baf4..b9646f4d37303 100644 --- a/docs/reference/search/search-shards.asciidoc +++ b/docs/reference/search/search-shards.asciidoc @@ -45,23 +45,23 @@ this parameter or use `*` or `_all`. [[search-shards-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + -- Defaults to `open`. 
-- -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=local] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] [[search-shards-api-example]] diff --git a/docs/reference/search/search-template-api.asciidoc b/docs/reference/search/search-template-api.asciidoc index 539048a324746..038396e558607 100644 --- a/docs/reference/search/search-template-api.asciidoc +++ b/docs/reference/search/search-template-api.asciidoc @@ -78,7 +78,7 @@ this parameter or use `*`. [[search-template-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. @@ -86,27 +86,27 @@ Defaults to `true`. (Optional, Boolean) If `true`, network round-trips are minimized for cross-cluster search requests. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] `explain`:: (Optional, Boolean) If `true`, the response includes additional details about score computation as part of a hit. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=preference] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=preference] `rest_total_hits_as_int`:: (Optional, Boolean) If `true`, the response returns `hits.total` as an integer. If false, it returns `hits.total` as an object. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=scroll] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=scroll] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search_type] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search_type] `typed_keys`:: (Optional, Boolean) If `true`, the response prefixes aggregation and suggester diff --git a/docs/reference/search/search-vector-tile-api.asciidoc b/docs/reference/search/search-vector-tile-api.asciidoc index 97d2e76e7fcda..2cdc29918a699 100644 --- a/docs/reference/search/search-vector-tile-api.asciidoc +++ b/docs/reference/search/search-vector-tile-api.asciidoc @@ -429,7 +429,7 @@ include::search-vector-tile-api.asciidoc[tag=grid-type] (Optional, object) <> used to filter documents for the search. 
-include::{es-repo-dir}/search/search.asciidoc[tag=runtime-mappings-def] +include::{es-ref-dir}/search/search.asciidoc[tag=runtime-mappings-def] include::search-vector-tile-api.asciidoc[tag=size] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index ffac84c11a779..db4b0febb07ba 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -1041,7 +1041,7 @@ PUT image-index option is not supported. * {blank} -include::{es-repo-dir}/search/knn-search.asciidoc[tag=hnsw-algorithm] +include::{es-ref-dir}/search/knn-search.asciidoc[tag=hnsw-algorithm] NOTE: Approximate kNN search always uses the <> search type in order to gather diff --git a/docs/reference/search/search-your-data/search-across-clusters.asciidoc b/docs/reference/search/search-your-data/search-across-clusters.asciidoc index ee1d9fcae18e8..2573722b6d2e7 100644 --- a/docs/reference/search/search-your-data/search-across-clusters.asciidoc +++ b/docs/reference/search/search-your-data/search-across-clusters.asciidoc @@ -1366,7 +1366,7 @@ following major version. For example, a local 7.17 cluster can search any remote 8.x cluster. [[ccs-version-compatibility]] -include::{es-repo-dir}/search/search-your-data/ccs-version-compat-matrix.asciidoc[] +include::{es-ref-dir}/search/search-your-data/ccs-version-compat-matrix.asciidoc[] IMPORTANT: For the <>, the local and remote clusters must use the same {es} version if they have versions prior to 7.17.7 (included) or prior to 8.5.1 (included). diff --git a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc index 16952f94890c7..596af695b7910 100644 --- a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc +++ b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc @@ -47,7 +47,7 @@ An example synonyms file: [source,synonyms] -------------------------------------------------- -include::{es-test-dir}/cluster/config/analysis/synonym.txt[] +include::{elasticsearch-root}/docs/src/test/cluster/config/analysis/synonym.txt[] -------------------------------------------------- To update an existing synonyms set, upload new files to your cluster. diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 53abf0f0458af..e7e16d74764fa 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -20,7 +20,7 @@ review the corresponding instructions. [[infer-service-requirements]] ==== Requirements -include::{es-repo-dir}/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/inference-api/infer-api-requirements-widget.asciidoc[] [discrete] @@ -29,7 +29,7 @@ include::{es-repo-dir}/tab-widgets/inference-api/infer-api-requirements-widget.a Create an {infer} endpoint by using the <>: -include::{es-repo-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] [discrete] @@ -41,7 +41,7 @@ that the model will create based on your input text - must be created. The destination index must have a field with the <> field type to index the output of the used model. 
-include::{es-repo-dir}/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc[]
[discrete]
@@ -52,7 +52,7 @@ Create an <> with an <> and use the model you created above to
infer against the data that is being ingested in the pipeline.
-include::{es-repo-dir}/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/inference-api/infer-api-ingest-pipeline-widget.asciidoc[]
[discrete]
@@ -82,7 +82,7 @@ you can see an index named `test-data` with 182469 documents.
Create the embeddings from the text by reindexing the data through the {infer}
pipeline that uses the chosen model as the inference model.
-include::{es-repo-dir}/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/inference-api/infer-api-reindex-widget.asciidoc[]
The call returns a task ID to monitor the progress:
@@ -114,4 +114,4 @@ provide the query text and the model you have used to create the embeddings.
NOTE: If you cancelled the reindexing process, the query runs on only part
of the data, which affects the quality of your results.
-include::{es-repo-dir}/tab-widgets/inference-api/infer-api-search-widget.asciidoc[]
\ No newline at end of file
+include::{es-ref-dir}/tab-widgets/inference-api/infer-api-search-widget.asciidoc[]
\ No newline at end of file
diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc
index f4768e5c3a23d..a4d892c98645b 100644
--- a/docs/reference/search/search-your-data/semantic-search.asciidoc
+++ b/docs/reference/search/search-your-data/semantic-search.asciidoc
@@ -64,7 +64,7 @@ important each is.
After you decide which model you want to use for implementing semantic search,
you need to deploy the model in {es}.
-include::{es-repo-dir}/tab-widgets/semantic-search/deploy-nlp-model-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/semantic-search/deploy-nlp-model-widget.asciidoc[]
[discrete]
[[semantic-search-field-mappings]]
@@ -74,7 +74,7 @@ Before you start using the deployed model to generate embeddings based on your
input text, you need to prepare your index mapping first. The mapping of the
index depends on the type of model.
-include::{es-repo-dir}/tab-widgets/semantic-search/field-mappings-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/semantic-search/field-mappings-widget.asciidoc[]
[discrete]
[[semantic-search-generate-embeddings]]
@@ -89,7 +89,7 @@ infer against the data ingested through the pipeline.
After you have created the ingest pipeline with the inference processor, you can
ingest your data through it to generate the model output.
-include::{es-repo-dir}/tab-widgets/semantic-search/generate-embeddings-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/semantic-search/generate-embeddings-widget.asciidoc[]
Now it is time to perform semantic search!
@@ -100,7 +100,7 @@ Now it is time to perform semantic search!
Depending on the type of model you have deployed, you can query rank features
with a text expansion query, or dense vectors with a kNN search.
-include::{es-repo-dir}/tab-widgets/semantic-search/search-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/semantic-search/search-widget.asciidoc[] [discrete] [[semantic-search-hybrid-search]] @@ -114,7 +114,7 @@ Combining semantic and lexical search into one hybrid search request using but hybrid search using reciprocal rank fusion {blog-ref}improving-information-retrieval-elastic-stack-hybrid[has been shown to perform better in general]. -include::{es-repo-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[] [discrete] [[semantic-search-read-more]] diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 3be30c98261d5..15985088a6ff7 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -56,7 +56,7 @@ IMPORTANT: Several options for this API can be specified using a query parameter or a request body parameter. If both parameters are specified, only the query parameter is used. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `true`. @@ -70,9 +70,9 @@ no partial results. Defaults to `true`. To override the default for this field, set the `search.default_allow_partial_results` cluster setting to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] `batched_reduce_size`:: (Optional, integer) The number of shard results that should be reduced at once @@ -86,15 +86,15 @@ shards in the request can be large. Defaults to `512`. coordinating node and the remote clusters are minimized when executing {ccs} (CCS) requests. See <>. Defaults to `true`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df] `docvalue_fields`:: (Optional, string) A comma-separated list of fields to return as the docvalue representation of a field for each hit. See <>. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] + Defaults to `open`. @@ -103,13 +103,13 @@ Defaults to `open`. computation as part of a hit. Defaults to `false`. [[search-from-param]] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from] + By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through more hits, use the <> parameter. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ignore_throttled] `include_named_queries_score`:: (Optional, Boolean) If `true`, includes the score contribution from any named queries. @@ -118,9 +118,9 @@ response. Typically, this adds a small overhead to a request. However, using computationally expensive named queries on a large number of hits may add significant overhead. Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] `max_concurrent_shard_requests`:: (Optional, integer) Defines the number of concurrent shard requests per node @@ -182,7 +182,7 @@ end::search-preference[] [[search-api-query-params-q]] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] + You can use the `q` parameter to run a query parameter search. Query parameter searches do not support the full {es} <> but are handy for @@ -201,7 +201,7 @@ level settings. (Optional, Boolean) Indicates whether hits.total should be rendered as an integer or an object in the rest search response. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=routing] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] [[search-api-scroll-query-param]] `scroll`:: @@ -275,9 +275,9 @@ Comma-separated list of source fields to return. Wildcard (`*`) patterns are supported. ==== -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_excludes] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source_includes] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source_includes] `stats`:: (Optional, string) Specific `tag` of the request for logging and statistical @@ -322,7 +322,7 @@ returned. This parameter can only be used when the `suggest_field` query string parameter is specified. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] + Defaults to `0`, which does not terminate query execution early. @@ -466,7 +466,7 @@ search response. (Optional, Boolean) If `true`, returns detailed information about score computation as part of a hit. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from] + By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through more hits, use the @@ -493,30 +493,30 @@ A boost value greater than `1.0` increases the score. 
A boost value between [[search-api-knn]] `knn`:: (Optional, object or array of objects) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn] + .Properties of `knn` object [%collapsible%open] ==== `field`:: (Required, string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-field] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-field] `filter`:: (Optional, <>) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] `k`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-k] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-k] `num_candidates`:: (Optional, integer) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-num-candidates] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-num-candidates] `query_vector`:: (Optional, array of floats) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector] `query_vector_builder`:: (Optional, object) @@ -526,7 +526,7 @@ not both. Refer to <> to learn more. `similarity`:: (Optional, float) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] ==== @@ -714,7 +714,7 @@ aggregation for its associated searches. You can retrieve these stats using the <>. [[request-body-search-terminate-after]] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=terminate_after] + Defaults to `0`, which does not terminate query execution early. diff --git a/docs/reference/search/validate.asciidoc b/docs/reference/search/validate.asciidoc index 528e24151c47c..ce682e485cd27 100644 --- a/docs/reference/search/validate.asciidoc +++ b/docs/reference/search/validate.asciidoc @@ -41,7 +41,7 @@ request body. search. Supports wildcards (`*`). To search all data streams or indices, omit this parameter or use `*` or `_all`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=query] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=query] [[search-validate-api-query-params]] @@ -51,33 +51,33 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=query] (Optional, Boolean) If `true`, the validation is executed on all shards instead of one random shard per index. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-indices] + Defaults to `false`. 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyzer] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyzer] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=default_operator] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=default_operator] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=df] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=df] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=expand-wildcards] `explain`:: (Optional, Boolean) If `true`, the response returns detailed information if an error has occurred. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=lenient] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=lenient] `rewrite`:: (Optional, Boolean) If `true`, returns a more detailed explanation showing the actual Lucene query that will be executed. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-q] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-q] [[search-validate-api-example]] diff --git a/docs/reference/searchable-snapshots/apis/mount-snapshot.asciidoc b/docs/reference/searchable-snapshots/apis/mount-snapshot.asciidoc index 255b78993e1c2..ba25cebcd1e1a 100644 --- a/docs/reference/searchable-snapshots/apis/mount-snapshot.asciidoc +++ b/docs/reference/searchable-snapshots/apis/mount-snapshot.asciidoc @@ -40,7 +40,7 @@ to mount. [[searchable-snapshots-api-mount-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `wait_for_completion`:: (Optional, Boolean) If `true`, the request blocks until the operation is complete. diff --git a/docs/reference/searchable-snapshots/apis/node-cache-stats.asciidoc b/docs/reference/searchable-snapshots/apis/node-cache-stats.asciidoc index ce97a912d8753..50314b6d36f28 100644 --- a/docs/reference/searchable-snapshots/apis/node-cache-stats.asciidoc +++ b/docs/reference/searchable-snapshots/apis/node-cache-stats.asciidoc @@ -33,7 +33,7 @@ For more information, see <>. [[searchable-snapshots-api-cache-stats-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[searchable-snapshots-api-cache-stats-response-body]] diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index a10116005a6b5..8fdad409ee980 100644 --- a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -286,18 +286,18 @@ You can configure the following TLS/SSL settings. 
+{ssl-prefix}.ssl.supported_protocols+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] ifdef::verifies[] +{ssl-prefix}.ssl.verification_mode+:: (<>) deprecated:[7.16.0] Controls the verification of certificates. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] endif::verifies[] +{ssl-prefix}.ssl.cipher_suites+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] [#{ssl-context}-tls-ssl-key-trusted-certificate-settings] ===== {component} TLS/SSL key and trusted certificate settings @@ -315,23 +315,23 @@ When using PEM encoded files, use the following settings: +{ssl-prefix}.ssl.key+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +{ssl-prefix}.ssl.key_passphrase+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +{ssl-prefix}.ssl.secure_key_passphrase+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +{ssl-prefix}.ssl.certificate+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +{ssl-prefix}.ssl.certificate_authorities+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] ===== Java keystore files @@ -340,35 +340,35 @@ and certificates that should be trusted, use the following settings: +{ssl-prefix}.ssl.keystore.path+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +{ssl-prefix}.ssl.keystore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +{ssl-prefix}.ssl.keystore.secure_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +{ssl-prefix}.ssl.keystore.key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +{ssl-prefix}.ssl.keystore.secure_key_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +{ssl-prefix}.ssl.truststore.path+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] 
+{ssl-prefix}.ssl.truststore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +{ssl-prefix}.ssl.truststore.secure_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] [#{ssl-context}-pkcs12-files] ===== PKCS#12 files @@ -380,31 +380,31 @@ PKCS#12 files are configured in the same way as Java keystore files: +{ssl-prefix}.ssl.keystore.path+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +{ssl-prefix}.ssl.keystore.type+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +{ssl-prefix}.ssl.keystore.password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +{ssl-prefix}.ssl.keystore.secure_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +{ssl-prefix}.ssl.keystore.key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +{ssl-prefix}.ssl.keystore.secure_key_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +{ssl-prefix}.ssl.truststore.path+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +{ssl-prefix}.ssl.truststore.type+:: (<>) deprecated:[7.16.0] @@ -413,9 +413,9 @@ Set this to `PKCS12` to indicate that the truststore is a PKCS#12 file. +{ssl-prefix}.ssl.truststore.password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +{ssl-prefix}.ssl.truststore.secure_password+:: (<>) deprecated:[7.16.0] -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index ae8bf5d4e9006..c620d97fda425 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -613,90 +613,90 @@ Defaults to `5s` (5 seconds ). `ssl.key`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] + If the LDAP server requires client authentication, it uses this file. You cannot use this setting and `ssl.keystore.path` at the same time. 
`ssl.key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] `ssl.secure_key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] `ssl.certificate`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] + This certificate is presented to clients when they connect. `ssl.certificate_authorities`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] + You cannot use this setting and `ssl.truststore.path` at the same time. `ssl.keystore.path`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] + You cannot use this setting and `ssl.key` at the same time. `ssl.keystore.type`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] `ssl.keystore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] `ssl.keystore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] `ssl.keystore.key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] `ssl.keystore.secure_key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] `ssl.truststore.path`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] + You cannot use this setting and `ssl.certificate_authorities` at the same time. `ssl.truststore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] `ssl.truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] `ssl.truststore.type`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] `ssl.verification_mode`:: (<>) Indicates the type of verification when using `ldaps` to protect against man in the middle attacks and certificate forgery. 
-include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] `ssl.supported_protocols`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] `ssl.cipher_suites`:: (<>) Specifies the cipher suites that should be supported when communicating with the LDAP server. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] //TBD: Can this be updated to using the Java 11 URL instead or does it have to stay java8? `cache.ttl`:: @@ -915,91 +915,91 @@ Defaults to `5s` (5 seconds ). `ssl.certificate`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] + This certificate is presented to clients when they connect. `ssl.certificate_authorities`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] + You cannot use this setting and `ssl.truststore.path` at the same time. `ssl.key`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] + If the Active Directory server requires client authentication, it uses this file. You cannot use this setting and `ssl.keystore.path` at the same time. `ssl.key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] `ssl.secure_key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] `ssl.keystore.key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] `ssl.keystore.secure_key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] `ssl.keystore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] `ssl.secure_keystore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] //TBD: Why/how is this different than `ssl.keystore.secure_password`? `ssl.keystore.path`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] + You cannot use this setting and `ssl.key` at the same time. 
`ssl.keystore.type`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] `ssl.truststore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] `ssl.truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] `ssl.truststore.path`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] + You cannot use this setting and `ssl.certificate_authorities` at the same time. `ssl.truststore.type`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] `ssl.verification_mode`:: (<>) Indicates the type of verification when using `ldaps` to protect against man in the middle attacks and certificate forgery. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] `ssl.supported_protocols`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] `ssl.cipher_suites`:: (<>) Specifies the cipher suites that should be supported when communicating with the Active Directory server. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] //TBD: Can this be updated to using the Java 11 URL instead or does it have to stay java8? `cache.ttl`:: @@ -1060,12 +1060,12 @@ Algorithm for the truststore. Defaults to `SunX509`. `truststore.password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] If `truststore.path` is set, this setting is required. `truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] `truststore.path`:: (<>) @@ -1522,91 +1522,91 @@ over https. // tag::saml-ssl-key-tag[] `ssl.key` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] // end::saml-ssl-key-tag[] // tag::saml-ssl-key-passphrase-tag[] `ssl.key_passphrase` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] // end::saml-ssl-key-passphrase-tag[] `ssl.secure_key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] + You cannot use this setting and `ssl.key_passphrase` at the same time. 
// tag::saml-ssl-certificate-tag[] `ssl.certificate` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] // end::saml-ssl-certificate-tag[] // tag::saml-ssl-certificate-authorities-tag[] `ssl.certificate_authorities` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] // end::saml-ssl-certificate-authorities-tag[] // tag::saml-ssl-keystore-path-tag[] `ssl.keystore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] // end::saml-ssl-keystore-path-tag[] // tag::saml-ssl-keystore-type[] `ssl.keystore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] // end::saml-ssl-keystore-type[] // tag::saml-ssl-keystore-password-tag[] `ssl.keystore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] // end::saml-ssl-keystore-password-tag[] `ssl.keystore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] + You cannot use this setting and `ssl.keystore.password` at the same time. //TBD: Why is this different name than `ssl.secure_keystore.password` elsewhere in this file? `ssl.keystore.key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] + You cannot use this setting and `ssl.keystore.secure_key_password` at the same time. `ssl.keystore.secure_key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] You cannot use this setting and `ssl.keystore.key_password` at the same time. // tag::saml-ssl-truststore-path-tag[] `ssl.truststore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] // end::saml-ssl-truststore-path-tag[] // tag::saml-ssl-truststore-type-tag[] `ssl.truststore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] // end::saml-ssl-truststore-type-tag[] // tag::saml-ssl-truststore-password-tag[] `ssl.truststore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] // end::saml-ssl-truststore-password-tag[] `ssl.truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] + This setting cannot be used with `ssl.truststore.password`. 
@@ -1614,19 +1614,19 @@ This setting cannot be used with `ssl.truststore.password`. `ssl.verification_mode` {ess-icon}:: (<>) Controls the verification of certificates. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] // end::saml-ssl-verification-mode-tag[] // tag::saml-ssl-supported-prototols-tag[] `ssl.supported_protocols` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] // end::saml-ssl-supported-prototols-tag[] // tag::saml-ssl-cipher-suites-tag[] `ssl.cipher_suites` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] // end::saml-ssl-cipher-suites-tag[] [discrete] @@ -2008,91 +2008,91 @@ NOTE: These settings are _only_ used for the back-channel communication between // tag::oidc-ssl-key-tag[] `ssl.key` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] // end::oidc-ssl-key-tag[] // tag::oidc-ssl-key-passphrase-tag[] `ssl.key_passphrase` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] // end::oidc-ssl-key-passphrase-tag[] `ssl.secure_key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] + You cannot use this setting and `ssl.key_passphrase` at the same time. // tag::oidc-ssl-certificate-tag[] `ssl.certificate` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] // end::oidc-ssl-certificate-tag[] // tag::oidc-ssl-certificate-authorities-tag[] `ssl.certificate_authorities` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] // end::oidc-ssl-certificate-authorities-tag[] // tag::oidc-ssl-keystore-path-tag[] `ssl.keystore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] // end::oidc-ssl-keystore-path-tag[] // tag::oidc-ssl-keystore-type-tag[] `ssl.keystore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] // end::oidc-ssl-keystore-type-tag[] // tag::oidc-ssl-keystore-password-tag[] `ssl.keystore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] // end::oidc-ssl-keystore-password-tag[] `ssl.keystore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] + You cannot use this setting and `ssl.keystore.password` at the same time. 
`ssl.keystore.key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] + You cannot use this setting and `ssl.keystore.secure_key_password` at the same time. `ssl.keystore.secure_key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] + You cannot use this setting and `ssl.keystore.key_password` at the same time. // tag::oidc-ssl-truststore-path-tag[] `ssl.truststore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] // end::oidc-ssl-truststore-path-tag[] // tag::oidc-ssl-truststore-type-tag[] `ssl.truststore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] // end::oidc-ssl-truststore-type-tag[] // tag::oidc-ssl-truststore-password-tag[] `ssl.truststore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] // end::oidc-ssl-truststore-password-tag[] `ssl.truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] + You cannot use this setting and `ssl.truststore.password` at the same time. @@ -2100,19 +2100,19 @@ You cannot use this setting and `ssl.truststore.password` at the same time. `ssl.verification_mode` {ess-icon}:: (<>) Controls the verification of certificates. 
-include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] // end::oidc-ssl-verification-mode-tag[] // tag::oidc-ssl-supported-protocols-tag[] `ssl.supported_protocols` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] // end::oidc-ssl-supported-protocols-tag[] // tag::oidc-ssl-cipher-suites-tag[] `ssl.cipher_suites` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] // end::oidc-ssl-cipher-suites-tag[] [[ref-jwt-settings]] @@ -2443,91 +2443,91 @@ NOTE: These settings are _only_ used for the back-channel communication between // tag::jwt-ssl-key-tag[] `ssl.key` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] // end::jwt-ssl-key-tag[] // tag::jwt-ssl-key-passphrase-tag[] `ssl.key_passphrase` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] // end::jwt-ssl-key-passphrase-tag[] `ssl.secure_key_passphrase`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] + You cannot use this setting and `ssl.key_passphrase` at the same time. // tag::jwt-ssl-certificate-tag[] `ssl.certificate` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] // end::jwt-ssl-certificate-tag[] // tag::jwt-ssl-certificate-authorities-tag[] `ssl.certificate_authorities` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] // end::jwt-ssl-certificate-authorities-tag[] // tag::jwt-ssl-keystore-path-tag[] `ssl.keystore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] // end::jwt-ssl-keystore-path-tag[] // tag::jwt-ssl-keystore-type-tag[] `ssl.keystore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] // end::jwt-ssl-keystore-type-tag[] // tag::jwt-ssl-keystore-password-tag[] `ssl.keystore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] // end::jwt-ssl-keystore-password-tag[] `ssl.keystore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] + You cannot use this setting and `ssl.keystore.password` at the same time. 
`ssl.keystore.key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] + You cannot use this setting and `ssl.keystore.secure_key_password` at the same time. `ssl.keystore.secure_key_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] + You cannot use this setting and `ssl.keystore.key_password` at the same time. // tag::jwt-ssl-truststore-path-tag[] `ssl.truststore.path` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] // end::jwt-ssl-truststore-path-tag[] // tag::jwt-ssl-truststore-type-tag[] `ssl.truststore.type` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-type] // end::jwt-ssl-truststore-type-tag[] // tag::jwt-ssl-truststore-password-tag[] `ssl.truststore.password` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] // end::jwt-ssl-truststore-password-tag[] `ssl.truststore.secure_password`:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] + You cannot use this setting and `ssl.truststore.password` at the same time. @@ -2535,19 +2535,19 @@ You cannot use this setting and `ssl.truststore.password` at the same time. `ssl.verification_mode` {ess-icon}:: (<>) Controls the verification of certificates. -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] // end::jwt-ssl-verification-mode-tag[] // tag::jwt-ssl-supported-protocols-tag[] `ssl.supported_protocols` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] // end::jwt-ssl-supported-protocols-tag[] // tag::jwt-ssl-cipher-suites-tag[] `ssl.cipher_suites` {ess-icon}:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] // end::jwt-ssl-cipher-suites-tag[] [discrete] diff --git a/docs/reference/settings/ssl-settings.asciidoc b/docs/reference/settings/ssl-settings.asciidoc index 2ab32c1a65c2d..1c484a444743f 100644 --- a/docs/reference/settings/ssl-settings.asciidoc +++ b/docs/reference/settings/ssl-settings.asciidoc @@ -15,7 +15,7 @@ endif::no-enabled-setting[] +{ssl-prefix}.ssl.supported_protocols+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-supported-protocols] ifdef::server[] +{ssl-prefix}.ssl.client_authentication+:: @@ -41,11 +41,11 @@ a TLS _server_ is discouraged. 
endif::verifies[] Defines how to verify the certificates presented by another party in the TLS connection: -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-verification-mode-values] +{ssl-prefix}.ssl.cipher_suites+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-cipher-suites-values] [#{ssl-context}-tls-ssl-key-trusted-certificate-settings] ===== {component} TLS/SSL key and trusted certificate settings @@ -66,25 +66,25 @@ When using PEM encoded files, use the following settings: +{ssl-prefix}.ssl.key+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-pem] ifndef::secure-pass[] +{ssl-prefix}.ssl.key_passphrase+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-key-passphrase] endif::secure-pass[] +{ssl-prefix}.ssl.secure_key_passphrase+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-secure-key-passphrase] +{ssl-prefix}.ssl.certificate+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate] +{ssl-prefix}.ssl.certificate_authorities+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-certificate-authorities] ===== Java keystore files @@ -93,41 +93,41 @@ and certificates that should be trusted, use the following settings: +{ssl-prefix}.ssl.keystore.path+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] ifndef::secure-pass[] +{ssl-prefix}.ssl.keystore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] endif::secure-pass[] +{ssl-prefix}.ssl.keystore.secure_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] ifndef::secure-pass[] +{ssl-prefix}.ssl.keystore.key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] endif::secure-pass[] +{ssl-prefix}.ssl.keystore.secure_key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +{ssl-prefix}.ssl.truststore.path+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] ifndef::secure-pass[] +{ssl-prefix}.ssl.truststore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] endif::secure-pass[] +{ssl-prefix}.ssl.truststore.secure_password+:: (<>) 
-include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] [#{ssl-context}-pkcs12-files] ===== PKCS#12 files @@ -139,35 +139,35 @@ PKCS#12 files are configured in the same way as Java keystore files: +{ssl-prefix}.ssl.keystore.path+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-path] +{ssl-prefix}.ssl.keystore.type+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-type-pkcs12] ifndef::secure-pass[] +{ssl-prefix}.ssl.keystore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-password] endif::secure-pass[] +{ssl-prefix}.ssl.keystore.secure_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-password] ifndef::secure-pass[] +{ssl-prefix}.ssl.keystore.key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-key-password] endif::secure-pass[] +{ssl-prefix}.ssl.keystore.secure_key_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-keystore-secure-key-password] +{ssl-prefix}.ssl.truststore.path+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-path] +{ssl-prefix}.ssl.truststore.type+:: (<>) @@ -177,10 +177,10 @@ Set this to `PKCS12` to indicate that the truststore is a PKCS#12 file. ifndef::secure-pass[] +{ssl-prefix}.ssl.truststore.password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-password] endif::secure-pass[] +{ssl-prefix}.ssl.truststore.secure_password+:: (<>) -include::{es-repo-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] +include::{es-ref-dir}/settings/common-defs.asciidoc[tag=ssl-truststore-secure-password] diff --git a/docs/reference/setup/add-nodes.asciidoc b/docs/reference/setup/add-nodes.asciidoc index e65dd0ba7af1f..ba749782c092f 100644 --- a/docs/reference/setup/add-nodes.asciidoc +++ b/docs/reference/setup/add-nodes.asciidoc @@ -37,7 +37,7 @@ To add a node to a cluster running on multiple machines, you must also set the rest of its cluster. ==== -include::{es-repo-dir}/security/enroll-nodes.asciidoc[] +include::{es-ref-dir}/security/enroll-nodes.asciidoc[] For more information about discovery and shard allocation, refer to <> and <>. diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index 1d111a07f9c2b..eea0541b898c9 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -159,7 +159,7 @@ cluster settings and node settings. The API doesn't require a restart and ensures a setting's value is the same on all nodes. 
==== -include::{es-repo-dir}/cluster/update-settings.asciidoc[tag=transient-settings-warning] +include::{es-ref-dir}/cluster/update-settings.asciidoc[tag=transient-settings-warning] // end::cluster-setting-precedence[] -- diff --git a/docs/reference/setup/important-settings/path-settings.asciidoc b/docs/reference/setup/important-settings/path-settings.asciidoc index 3e87d504963a2..a0a444ca5090a 100644 --- a/docs/reference/setup/important-settings/path-settings.asciidoc +++ b/docs/reference/setup/important-settings/path-settings.asciidoc @@ -18,9 +18,9 @@ data and log to locations outside of `$ES_HOME` by default. Supported `path.data` and `path.logs` values vary by platform: -include::{es-repo-dir}/tab-widgets/customize-data-log-path-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/customize-data-log-path-widget.asciidoc[] -include::{es-repo-dir}/modules/node.asciidoc[tag=modules-node-data-path-warning-tag] +include::{es-ref-dir}/modules/node.asciidoc[tag=modules-node-data-path-warning-tag] [discrete] ==== Multiple data paths @@ -36,7 +36,7 @@ the node, even if the node’s other paths have available disk space. If you nee additional disk space, we recommend you add a new node rather than additional data paths. -include::{es-repo-dir}/tab-widgets/multi-data-path-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/multi-data-path-widget.asciidoc[] [discrete] [[mdp-migrate]] diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index edaed7c785489..c7e146a5442cf 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -1,6 +1,8 @@ [[deb]] === Install Elasticsearch with Debian Package +:include-xpack: true + The Debian package for Elasticsearch can be <> or from our <>. It can be used to install Elasticsearch on any Debian-based system such as Debian and Ubuntu. diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index a30c8c313b263..60815d570ab35 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -1,6 +1,8 @@ [[rpm]] === Install Elasticsearch with RPM +:include-xpack: true + The RPM for Elasticsearch can be <> or from our <>. It can be used to install Elasticsearch on any RPM-based system such as OpenSuSE, SLES, Centos, Red Hat, @@ -131,6 +133,8 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] +:include-xpack!: true + [id="{distro}-running-systemd"] include::systemd.asciidoc[] diff --git a/docs/reference/setup/install/targz-start.asciidoc b/docs/reference/setup/install/targz-start.asciidoc index 79c4131a57030..0539c74469e06 100644 --- a/docs/reference/setup/install/targz-start.asciidoc +++ b/docs/reference/setup/install/targz-start.asciidoc @@ -53,4 +53,4 @@ symbolic link. :slash: / -include::{es-repo-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file +include::{es-ref-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/setup/install/targz.asciidoc b/docs/reference/setup/install/targz.asciidoc index c3dd6ad354d9d..470299abe9ac1 100644 --- a/docs/reference/setup/install/targz.asciidoc +++ b/docs/reference/setup/install/targz.asciidoc @@ -1,6 +1,8 @@ [[targz]] === Install {es} from archive on Linux or MacOS +:include-xpack: true + {es} is available as a `.tar.gz` archive for Linux and MacOS. 
include::license.asciidoc[] diff --git a/docs/reference/setup/install/zip-windows-start.asciidoc b/docs/reference/setup/install/zip-windows-start.asciidoc index 60edbb9ec704c..8eb4bfb9afeb9 100644 --- a/docs/reference/setup/install/zip-windows-start.asciidoc +++ b/docs/reference/setup/install/zip-windows-start.asciidoc @@ -47,4 +47,4 @@ To stop {es}, press `Ctrl-C`. :slash: \ -include::{es-repo-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file +include::{es-ref-dir}/security/enroll-nodes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/setup/install/zip-windows.asciidoc b/docs/reference/setup/install/zip-windows.asciidoc index 214feaab16acc..eb84ff149f8bd 100644 --- a/docs/reference/setup/install/zip-windows.asciidoc +++ b/docs/reference/setup/install/zip-windows.asciidoc @@ -1,6 +1,8 @@ [[zip-windows]] === Install {es} with `.zip` on Windows +:include-xpack: true + {es} can be installed on Windows using the Windows `.zip` archive. This comes with a `elasticsearch-service.bat` command which will setup {es} to run as a service. diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc index f6b0ceb2d6ae6..7b36b6382c9bf 100644 --- a/docs/reference/setup/logging-config.asciidoc +++ b/docs/reference/setup/logging-config.asciidoc @@ -5,7 +5,7 @@ You can use {es}'s application logs to monitor your cluster and diagnose issues. If you run {es} as a service, the default location of the logs varies based on your platform and installation method: -include::{es-repo-dir}/tab-widgets/logging-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/logging-widget.asciidoc[] If you run {es} from the command line, {es} prints logs to the standard output (`stdout`). diff --git a/docs/reference/setup/restart-cluster.asciidoc b/docs/reference/setup/restart-cluster.asciidoc index 899cc9f465454..9488c6632836b 100644 --- a/docs/reference/setup/restart-cluster.asciidoc +++ b/docs/reference/setup/restart-cluster.asciidoc @@ -22,7 +22,7 @@ usage below the <> before to restar . *Disable shard allocation.* + -- -include::{es-repo-dir}/upgrade/disable-shard-alloc.asciidoc[] +include::{es-ref-dir}/upgrade/disable-shard-alloc.asciidoc[] -- // end::disable_shard_alloc[] @@ -72,7 +72,7 @@ or jobs with large model states. . *Shut down all nodes.* + -- -include::{es-repo-dir}/upgrade/shut-down-node.asciidoc[] +include::{es-ref-dir}/upgrade/shut-down-node.asciidoc[] -- . *Perform any needed changes.* @@ -176,7 +176,7 @@ the datafeeds from {kib} or with the <> and === Rolling restart -include::{es-repo-dir}/setup/restart-cluster.asciidoc[tag=disable_shard_alloc] +include::{es-ref-dir}/setup/restart-cluster.asciidoc[tag=disable_shard_alloc] . *Stop non-essential indexing and perform a flush.* (Optional) + @@ -191,7 +191,7 @@ POST /_flush -------------------------------------------------- -- -include::{es-repo-dir}/setup/restart-cluster.asciidoc[tag=stop_ml] +include::{es-ref-dir}/setup/restart-cluster.asciidoc[tag=stop_ml] + -- * If you perform a rolling restart, you can also leave your machine learning @@ -204,7 +204,7 @@ cluster. . *Shut down a single node in case of rolling restart.* + -- -include::{es-repo-dir}/upgrade/shut-down-node.asciidoc[] +include::{es-ref-dir}/upgrade/shut-down-node.asciidoc[] -- . *Perform any needed changes.* @@ -248,4 +248,4 @@ When the node has recovered and the cluster is stable, repeat these steps for each node that needs to be changed. 
-- -include::{es-repo-dir}/setup/restart-cluster.asciidoc[tag=restart_ml] +include::{es-ref-dir}/setup/restart-cluster.asciidoc[tag=restart_ml] diff --git a/docs/reference/shutdown/apis/shutdown-delete.asciidoc b/docs/reference/shutdown/apis/shutdown-delete.asciidoc index 5129b2f587be8..133539adfaa38 100644 --- a/docs/reference/shutdown/apis/shutdown-delete.asciidoc +++ b/docs/reference/shutdown/apis/shutdown-delete.asciidoc @@ -40,7 +40,7 @@ The ID of a node that you prepared for shut down. [[delete-shutdown-api-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[delete-shutdown-api-example]] ==== {api-examples-title} diff --git a/docs/reference/shutdown/apis/shutdown-get.asciidoc b/docs/reference/shutdown/apis/shutdown-get.asciidoc index 50fcb45b2f818..264a8dd7be181 100644 --- a/docs/reference/shutdown/apis/shutdown-get.asciidoc +++ b/docs/reference/shutdown/apis/shutdown-get.asciidoc @@ -40,7 +40,7 @@ If no ID is specified, returns the status of all nodes being prepared for shutdo [[get-shutdown-api-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[get-shutdown-api-example]] ==== {api-examples-title} diff --git a/docs/reference/shutdown/apis/shutdown-put.asciidoc b/docs/reference/shutdown/apis/shutdown-put.asciidoc index b8af8fb8741d8..236367f886ef9 100644 --- a/docs/reference/shutdown/apis/shutdown-put.asciidoc +++ b/docs/reference/shutdown/apis/shutdown-put.asciidoc @@ -50,7 +50,7 @@ No error is thrown if you specify an invalid node ID. [[put-shutdown-api-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[put-shutdown-api-request-body]] diff --git a/docs/reference/slm/apis/slm-get-status.asciidoc b/docs/reference/slm/apis/slm-get-status.asciidoc index fa9e2225f26e8..d4afbaddb1beb 100644 --- a/docs/reference/slm/apis/slm-get-status.asciidoc +++ b/docs/reference/slm/apis/slm-get-status.asciidoc @@ -25,7 +25,7 @@ You halt and restart the {slm-init} plugin with the ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[slm-api-get-status-prereqs]] ==== {api-prereq-title} diff --git a/docs/reference/slm/apis/slm-put.asciidoc b/docs/reference/slm/apis/slm-put.asciidoc index 69dfc4af4e84a..be265554deef5 100644 --- a/docs/reference/slm/apis/slm-put.asciidoc +++ b/docs/reference/slm/apis/slm-put.asciidoc @@ -41,7 +41,7 @@ you want to create or update. [[slm-api-put-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[slm-api-put-request-body]] @@ -55,7 +55,7 @@ Configuration for each snapshot created by the policy. 
[%collapsible%open] ==== :page-id: put-slm-api -include::{es-repo-dir}/snapshot-restore/apis/create-snapshot-api.asciidoc[tag=snapshot-config] +include::{es-ref-dir}/snapshot-restore/apis/create-snapshot-api.asciidoc[tag=snapshot-config] :!page-id: ==== diff --git a/docs/reference/slm/apis/slm-start.asciidoc b/docs/reference/slm/apis/slm-start.asciidoc index c17132908e41e..9d9b8108cb57b 100644 --- a/docs/reference/slm/apis/slm-start.asciidoc +++ b/docs/reference/slm/apis/slm-start.asciidoc @@ -30,7 +30,7 @@ Manually starting {slm-init} is only necessary if it has been stopped using the ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[slm-api-start-example]] ==== {api-examples-title} diff --git a/docs/reference/slm/apis/slm-stop.asciidoc b/docs/reference/slm/apis/slm-stop.asciidoc index 82b3e1b849ee5..253abec7b4d11 100644 --- a/docs/reference/slm/apis/slm-stop.asciidoc +++ b/docs/reference/slm/apis/slm-stop.asciidoc @@ -37,7 +37,7 @@ Use the <> to see if {slm-init} is running. ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [[slm-api-stop-example]] ==== {api-examples-title} diff --git a/docs/reference/snapshot-restore/apis/clean-up-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/clean-up-repo-api.asciidoc index 2360698e0be8f..249e192c0c587 100644 --- a/docs/reference/snapshot-restore/apis/clean-up-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/clean-up-repo-api.asciidoc @@ -48,7 +48,7 @@ Name of the snapshot repository to review and clean up. [[clean-up-snapshot-repo-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] [role="child_attributes"] [[clean-up-snapshot-repo-api-response-body]] diff --git a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc index fab734e8413c3..590bc7e7410f5 100644 --- a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc @@ -42,7 +42,7 @@ Name of the snapshot repository that both source and target snapshot belong to. [[clone-snapshot-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc index 06693dadd387f..baa28bb7b0a53 100644 --- a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc @@ -56,7 +56,7 @@ unique within the snapshot repository. 
[[create-snapshot-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `wait_for_completion`:: (Optional, Boolean) If `true`, the request returns a response when the snapshot diff --git a/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc index d3ebeeac3c036..2931faf49841d 100644 --- a/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/delete-repo-api.asciidoc @@ -51,7 +51,7 @@ supported. [[delete-snapshot-repo-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/apis/delete-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/delete-snapshot-api.asciidoc index 5bc46f54ec137..d1431b8cb6706 100644 --- a/docs/reference/snapshot-restore/apis/delete-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/delete-snapshot-api.asciidoc @@ -56,7 +56,7 @@ Comma-separated list of snapshot names to delete. Also accepts wildcards (`*`). [[delete-snapshot-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[delete-snapshot-api-example]] ==== {api-example-title} diff --git a/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc index 1f03a44c5e49f..cf1b9813c519e 100644 --- a/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-repo-api.asciidoc @@ -59,7 +59,7 @@ cluster, omit this parameter or use `*` or `_all`. only. If `false`, the request gets information from the master node. Defaults to `false`. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [role="child_attributes"] [[get-snapshot-repo-api-response-body]] diff --git a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc index e685badc31d4b..622e1ade024b0 100644 --- a/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-snapshot-api.asciidoc @@ -83,7 +83,7 @@ Set <> to `true` to re [[get-snapshot-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] [[get-snapshot-api-ignore-unavailable]] `ignore_unavailable`:: diff --git a/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc b/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc index 150f4dfff48ab..d8b03cbc0e880 100644 --- a/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/get-snapshot-status-api.asciidoc @@ -134,7 +134,7 @@ currently running snapshots. Wildcards (`*`) are not supported. 
[[get-snapshot-status-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `ignore_unavailable`:: (Optional, Boolean) diff --git a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc index 1154b970b907d..c3e9c0a0904be 100644 --- a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc @@ -52,7 +52,7 @@ IMPORTANT: Several options for this API can be specified using a query parameter or a request body parameter. If both parameters are specified, only the query parameter is used. -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc index db8923505973b..9fe06d73f1a63 100644 --- a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc @@ -113,7 +113,7 @@ Name of the snapshot to restore. [[restore-snapshot-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `wait_for_completion`:: (Optional, Boolean) If `true`, the request returns a response when the restore diff --git a/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc index f0fc659df1a41..9d14e8a426e32 100644 --- a/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/verify-repo-api.asciidoc @@ -47,7 +47,7 @@ Name of the snapshot repository to verify. [[verify-snapshot-repo-api-query-params]] ==== {api-query-parms-title} -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout] `timeout`:: (Optional, <>) Specifies the period of time to wait for diff --git a/docs/reference/snapshot-restore/repository-read-only-url.asciidoc b/docs/reference/snapshot-restore/repository-read-only-url.asciidoc index 8f9cb7e198f81..53d0c340fcdc1 100644 --- a/docs/reference/snapshot-restore/repository-read-only-url.asciidoc +++ b/docs/reference/snapshot-restore/repository-read-only-url.asciidoc @@ -1,7 +1,7 @@ [[snapshots-read-only-repository]] === Read-only URL repository -include::{es-repo-dir}/snapshot-restore/on-prem-repo-type.asciidoc[] +include::{es-ref-dir}/snapshot-restore/on-prem-repo-type.asciidoc[] You can use a URL repository to give a cluster read-only access to a shared file system. 
Since URL repositories are always read-only, they're a safer and more diff --git a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc index 6be49d9d4422f..be5347845a2fb 100644 --- a/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc +++ b/docs/reference/snapshot-restore/repository-shared-file-system.asciidoc @@ -1,7 +1,7 @@ [[snapshots-filesystem-repository]] === Shared file system repository -include::{es-repo-dir}/snapshot-restore/on-prem-repo-type.asciidoc[] +include::{es-ref-dir}/snapshot-restore/on-prem-repo-type.asciidoc[] Use a shared file system repository to store snapshots on a shared file system. @@ -13,7 +13,7 @@ master and data node. For running clusters, this requires a Supported `path.repo` values vary by platform: -include::{es-repo-dir}/tab-widgets/register-fs-repo-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/register-fs-repo-widget.asciidoc[] [[filesystem-repository-settings]] ==== Repository settings diff --git a/docs/reference/sql/apis/sql-search-api.asciidoc b/docs/reference/sql/apis/sql-search-api.asciidoc index 596378c5e439d..118d7975aefd9 100644 --- a/docs/reference/sql/apis/sql-search-api.asciidoc +++ b/docs/reference/sql/apis/sql-search-api.asciidoc @@ -123,7 +123,7 @@ the scroll request. Defaults to `45s` (45 seconds). (Optional, <>) Timeout before the request fails. Defaults to `90s` (90 seconds). -include::{es-repo-dir}/search/search.asciidoc[tag=runtime-mappings-def] +include::{es-ref-dir}/search/search.asciidoc[tag=runtime-mappings-def] [[sql-search-api-time-zone]] `time_zone`:: diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index 1d2b2e7fd3ec8..2ae7ea78a803f 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -2,7 +2,7 @@ [[xpack-sql]] = SQL -:sql-tests: {xes-repo-dir}/../../plugin/sql/qa/ +:sql-tests: {elasticsearch-root}/x-pack/docs/{lang}/../../plugin/sql/qa/ :sql-specs: {sql-tests}server/src/main/resources/ :jdbc-tests: {sql-tests}jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc :security-tests: {sql-tests}server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security diff --git a/docs/reference/sql/security.asciidoc b/docs/reference/sql/security.asciidoc index b57179742700c..a440ae69d8daf 100644 --- a/docs/reference/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -44,7 +44,7 @@ APIs to view or edit a role defined in `roles.yml`. This example configures a role that can run SQL in JDBC querying the `test` index: -include::{es-repo-dir}/rest-api/security/create-roles.asciidoc[tag=sql-queries-permission] +include::{es-ref-dir}/rest-api/security/create-roles.asciidoc[tag=sql-queries-permission] [discrete] [[sql-role-file-example]] diff --git a/docs/reference/tab-widgets/ilm.asciidoc b/docs/reference/tab-widgets/ilm.asciidoc index 60a92135d7733..3682377d754e5 100644 --- a/docs/reference/tab-widgets/ilm.asciidoc +++ b/docs/reference/tab-widgets/ilm.asciidoc @@ -85,5 +85,5 @@ Index Lifecycle Policies**. Click **Create policy**. You can also use the <>. 
-include::{es-repo-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ilm-policy-api-ex] +include::{es-ref-dir}/data-streams/set-up-a-data-stream.asciidoc[tag=ilm-policy-api-ex] // end::custom[] diff --git a/docs/reference/tab-widgets/register-fs-repo.asciidoc b/docs/reference/tab-widgets/register-fs-repo.asciidoc index 6ff0bf23ca9e2..f222f87f5a3e5 100644 --- a/docs/reference/tab-widgets/register-fs-repo.asciidoc +++ b/docs/reference/tab-widgets/register-fs-repo.asciidoc @@ -48,7 +48,7 @@ PUT _snapshot/my_fs_backup relative path, `my_fs_backup_location`, resolves to `/mount/backups/my_fs_backup_location`. -include::{es-repo-dir}/snapshot-restore/register-repository.asciidoc[tag=multi-cluster-repo] +include::{es-ref-dir}/snapshot-restore/register-repository.asciidoc[tag=multi-cluster-repo] // tag::fs-repo-read-only[] To register a file system repository as read-only using the create snapshot @@ -119,7 +119,7 @@ PUT _snapshot/my_fs_backup relative path, `My_fs_backup_location`, resolves to `E:\Mount\Backups\My_fs_backup_location`. -include::{es-repo-dir}/snapshot-restore/register-repository.asciidoc[tag=multi-cluster-repo] +include::{es-ref-dir}/snapshot-restore/register-repository.asciidoc[tag=multi-cluster-repo] include::register-fs-repo.asciidoc[tag=fs-repo-read-only] diff --git a/docs/reference/tab-widgets/snapshot-repo.asciidoc b/docs/reference/tab-widgets/snapshot-repo.asciidoc index e3871d84e4832..b41ddbf128a33 100644 --- a/docs/reference/tab-widgets/snapshot-repo.asciidoc +++ b/docs/reference/tab-widgets/snapshot-repo.asciidoc @@ -15,5 +15,5 @@ You can also use any of the following custom repository types with {search-snaps // end::cloud[] // tag::self-managed[] -include::{es-repo-dir}/searchable-snapshots/index.asciidoc[tag=searchable-snapshot-repo-types] +include::{es-ref-dir}/searchable-snapshots/index.asciidoc[tag=searchable-snapshot-repo-types] // end::self-managed[] diff --git a/docs/reference/text-structure/apis/find-field-structure.asciidoc b/docs/reference/text-structure/apis/find-field-structure.asciidoc index 6788ddf7f42be..4fa108e92d4cb 100644 --- a/docs/reference/text-structure/apis/find-field-structure.asciidoc +++ b/docs/reference/text-structure/apis/find-field-structure.asciidoc @@ -57,22 +57,22 @@ chosen. `field`:: (Required, string) The name of the field that's analyzed. -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] `documents_to_sample`:: (Optional, unsigned integer) The number of documents to include in the structural analysis. The minimum is 2; the default is 1000. 
-include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] [discrete] [[find-field-structure-examples]] diff --git a/docs/reference/text-structure/apis/find-message-structure.asciidoc b/docs/reference/text-structure/apis/find-message-structure.asciidoc index 085f65b852126..6c1bf5089bed2 100644 --- a/docs/reference/text-structure/apis/find-message-structure.asciidoc +++ b/docs/reference/text-structure/apis/find-message-structure.asciidoc @@ -52,17 +52,17 @@ chosen. 
[[find-message-structure-query-parms]] == {api-query-parms-title} -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] [discrete] [[find-message-structure-request-body]] diff --git a/docs/reference/text-structure/apis/find-structure.asciidoc b/docs/reference/text-structure/apis/find-structure.asciidoc index b49b0f3526689..361560bace4ed 100644 --- a/docs/reference/text-structure/apis/find-structure.asciidoc +++ b/docs/reference/text-structure/apis/find-structure.asciidoc @@ -55,21 +55,21 @@ chosen. 
[[find-structure-query-parms]] == {api-query-parms-title} -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-charset] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-has-header-row] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-line-merge-size-limit] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-lines-to-sample] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] -include::{es-repo-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-charset] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-column-names] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-delimiter] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-explain] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-format] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-grok-pattern] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-ecs-compatibility] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-has-header-row] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-line-merge-size-limit] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-lines-to-sample] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-quote] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-should-trim-fields] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timeout] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-field] +include::{es-ref-dir}/text-structure/apis/find-structure-shared.asciidoc[tag=param-timestamp-format] [discrete] [[find-structure-request-body]] diff --git a/docs/reference/transform/apis/delete-transform.asciidoc b/docs/reference/transform/apis/delete-transform.asciidoc index 9a097407749a3..111dda23690b6 100644 --- a/docs/reference/transform/apis/delete-transform.asciidoc +++ b/docs/reference/transform/apis/delete-transform.asciidoc @@ -26,7 +26,7 @@ in the `transform_admin` built-in role. 
`<transform_id>`::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id]

[[delete-transform-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/transform/apis/get-transform-stats.asciidoc b/docs/reference/transform/apis/get-transform-stats.asciidoc
index 2a7ed1913546e..273b1d094979e 100644
--- a/docs/reference/transform/apis/get-transform-stats.asciidoc
+++ b/docs/reference/transform/apis/get-transform-stats.asciidoc
@@ -49,7 +49,7 @@ specifying `*` as the `<transform_id>`, or by omitting the

`<transform_id>`::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard]

[[get-transform-stats-query-parms]]
@@ -57,15 +57,15 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard]

`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1]

`from`::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from-transforms]

`size`::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=size-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=size-transforms]

[role="child_attributes"]
[[get-transform-stats-response]]
@@ -84,7 +84,7 @@ informational; you cannot update their values.
====
`changes_last_detected_at`:::
(date)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-changes-last-detected-at]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-changes-last-detected-at]

//Begin checkpointing.last
`last`:::
@@ -106,7 +106,7 @@ was created.

`last_search_time`:::
(date)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-last-search-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=checkpointing-last-search-time]

//Begin checkpointing.next
`next`:::
@@ -191,7 +191,7 @@ that the {transform} is failing to keep up.

`id`::
(string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id]

//Begin node
`node`::
@@ -221,11 +221,11 @@ example, `127.0.0.1:9300`.
`reason`:: (string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=state-transform-reason] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=state-transform-reason] `state`:: (string) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=state-transform] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=state-transform] //Begin stats `stats`:: @@ -237,71 +237,71 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=state-transform] `delete_time_in_ms`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=delete-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=delete-time-ms] `documents_deleted`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted-transform] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-deleted-transform] `documents_indexed`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-indexed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-indexed] `documents_processed`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=docs-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=docs-processed] `exponential_avg_checkpoint_duration_ms`::: (double) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-checkpoint-duration-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-checkpoint-duration-ms] `exponential_avg_documents_indexed`::: (double) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-indexed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-indexed] `exponential_avg_documents_processed`::: (double) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=exponential-avg-documents-processed] `index_failures`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-failures] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-failures] `index_time_in_ms`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-time-ms] `index_total`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-total] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=index-total] `pages_processed`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pages-processed] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pages-processed] `processing_time_in_ms`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=processing-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=processing-time-ms] `processing_total`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=processing-total] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=processing-total] `search_failures`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-failures] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-failures] `search_time_in_ms`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-time-ms] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-time-ms] `search_total`::: (long) -include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=search-total] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=search-total] `trigger_count`::: 
(long)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=trigger-count]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=trigger-count]
====
//End stats
diff --git a/docs/reference/transform/apis/get-transform.asciidoc b/docs/reference/transform/apis/get-transform.asciidoc
index c008e34e74fd4..ece59138e2893 100644
--- a/docs/reference/transform/apis/get-transform.asciidoc
+++ b/docs/reference/transform/apis/get-transform.asciidoc
@@ -40,14 +40,14 @@ specifying `*` as the `<transform_id>`, or by omitting the
`<transform_id>`.

`<transform_id>`::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id-wildcard]

[[get-transform-query-parms]]
== {api-query-parms-title}

`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms1]

`exclude_generated`::
(Optional, Boolean)
@@ -57,11 +57,11 @@ and then added to another cluster. Default is false.

`from`::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from-transforms]

`size`::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=size-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=size-transforms]

diff --git a/docs/reference/transform/apis/preview-transform.asciidoc b/docs/reference/transform/apis/preview-transform.asciidoc
index 86ca91b32fdf1..fa9ad0c0fc8f9 100644
--- a/docs/reference/transform/apis/preview-transform.asciidoc
+++ b/docs/reference/transform/apis/preview-transform.asciidoc
@@ -91,29 +91,29 @@ expires, the request fails and returns an error. Defaults to `30s`.
//Begin dest
`dest`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest]
+
.Properties of `dest`
[%collapsible%open]
====
`index`:::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-index]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-index]

`pipeline`:::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
====
//End dest

`frequency`::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=frequency]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=frequency]

//Begin latest
`latest`::
(Required^*^, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]
+
.Properties of `latest`
[%collapsible%open]
@@ -121,11 +121,11 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]

`sort`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-sort]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-sort]

`unique_key`:::
(Required, array of strings)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
====
//End latest

@@ -133,43 +133,43 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
//Begin pivot
`pivot`::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot]
+
.Properties of `pivot`
[%collapsible%open]
====
`aggregations` or `aggs`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot-aggs]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot-aggs]

`group_by`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot-group-by]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot-group-by]
====
//End pivot

//Begin retention policy
`retention_policy`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+
.Properties of `retention_policy`
[%collapsible%open]
====
`time`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+
.Properties of `time`
[%collapsible%open]
=====
`field`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]

`max_age`:::
(Required, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
=====
====
//End retention policy

@@ -177,29 +177,29 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-ti
//Begin source
`source`::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+
.Properties of `source`
[%collapsible%open]
====
`index`:::
(Required, string or array)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]

`query`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]

`runtime_mappings`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-runtime-mappings-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-runtime-mappings-transforms]
====
//End source

//Begin sync
`sync`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync]
+
.Properties of `sync`
[%collapsible%open]
@@ -208,18 +208,18 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync]

`time`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+
.Properties of `time`
[%collapsible%open]
=====
`delay`::::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]

`field`::::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
=====
//End sync.time
====
@@ -228,29 +228,29 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
//Begin settings
`settings`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+
.Properties of `settings`
[%collapsible%open]
====
`align_checkpoints`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
`dates_as_epoch_millis`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
`deduce_mappings`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
`docs_per_second`:::
(Optional, float)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
`max_page_search_size`:::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
`unattended`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
====
//End settings
diff --git a/docs/reference/transform/apis/put-transform.asciidoc b/docs/reference/transform/apis/put-transform.asciidoc
index a3f885795732e..ed2ceba0a7a59 100644
--- a/docs/reference/transform/apis/put-transform.asciidoc
+++ b/docs/reference/transform/apis/put-transform.asciidoc
@@ -101,7 +101,7 @@ expires, the request fails and returns an error. Defaults to `30s`.
//Begin dest
`dest`::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest]
+
.Properties of `dest`
[%collapsible%open]
@@ -109,12 +109,12 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest]

`index`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-index]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-index]

//Begin aliases
`aliases`:::
(Optional, array of objects)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]
+
.Properties of `aliases`
[%collapsible%open]
@@ -122,29 +122,29 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]

`alias`::::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-alias]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-alias]

`move_on_creation`::::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-move-on-creation]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-move-on-creation]
=====
//End aliases

`pipeline`:::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
====
//End dest

`frequency`::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=frequency]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=frequency]

//Begin latest
`latest`::
(Required^*^, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]
+
.Properties of `latest`
[%collapsible%open]
@@ -152,11 +152,11 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-latest]

`sort`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-sort]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-sort]

`unique_key`:::
(Required, array of strings)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
====
//End latest

@@ -164,13 +164,13 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
//Begin _meta
`_meta`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
//End _meta

//Begin pivot
`pivot`::
(Required^*^, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot]
+
.Properties of `pivot`
[%collapsible%open]
@@ -178,36 +178,36 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot]

`aggregations` or `aggs`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot-aggs]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot-aggs]

`group_by`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=pivot-group-by]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=pivot-group-by]
====
//End pivot

//Begin retention policy
`retention_policy`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+
.Properties of `retention_policy`
[%collapsible%open]
====
`time`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+
.Properties of `time`
[%collapsible%open]
=====
`field`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]

`max_age`:::
(Required, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
=====
====
//End retention policy
@@ -215,39 +215,39 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-ti
//Begin settings
`settings`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+
.Properties of `settings`
[%collapsible%open]
====
`align_checkpoints`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
`dates_as_epoch_millis`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
`deduce_mappings`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
`docs_per_second`:::
(Optional, float)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
`max_page_search_size`:::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
`num_failure_retries`:::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-num-failure-retries]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-num-failure-retries]
`unattended`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
====
//End settings
//Begin source
`source`::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+
.Properties of `source`
[%collapsible%open]
@@ -255,22 +255,22 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]

`index`:::
(Required, string or array)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]

`query`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]

`runtime_mappings`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-runtime-mappings-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-runtime-mappings-transforms]
====
//End source

//Begin sync
`sync`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync]
+
.Properties of `sync`
[%collapsible%open]
@@ -279,18 +279,18 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync]

//Begin time
`time`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+
.Properties of `time`
[%collapsible%open]
=====
`delay`::::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]

`field`::::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
+
--
TIP: It is strongly recommended to use a field that contains the
diff --git a/docs/reference/transform/apis/reset-transform.asciidoc b/docs/reference/transform/apis/reset-transform.asciidoc
index badbc0f022315..1194d3589275d 100644
--- a/docs/reference/transform/apis/reset-transform.asciidoc
+++ b/docs/reference/transform/apis/reset-transform.asciidoc
@@ -38,7 +38,7 @@ to the latest format as if the <<update-transform>> API was used. The

`<transform_id>`::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id]

[[reset-transform-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/transform/apis/schedule-now-transform.asciidoc b/docs/reference/transform/apis/schedule-now-transform.asciidoc
index 202b2d61fc730..7a276edf08819 100644
--- a/docs/reference/transform/apis/schedule-now-transform.asciidoc
+++ b/docs/reference/transform/apis/schedule-now-transform.asciidoc
@@ -35,7 +35,7 @@ called again in the meantime.

`<transform_id>`::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id]

[[schedule-now-transform-query-parms]]
== {api-query-parms-title}
diff --git a/docs/reference/transform/apis/stop-transform.asciidoc b/docs/reference/transform/apis/stop-transform.asciidoc
index 670e015c33149..e99fcbd413eba 100644
--- a/docs/reference/transform/apis/stop-transform.asciidoc
+++ b/docs/reference/transform/apis/stop-transform.asciidoc
@@ -41,7 +41,7 @@ comma-separated list or a wildcard expression.
To stop all {transforms}, use

`allow_no_match`::
(Optional, Boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms2]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=allow-no-match-transforms2]

`force`::
(Optional, Boolean) Set to `true` to stop a failed {transform} or to
diff --git a/docs/reference/transform/apis/update-transform.asciidoc b/docs/reference/transform/apis/update-transform.asciidoc
index 57a27b4efae76..1ac7d6d5410d2 100644
--- a/docs/reference/transform/apis/update-transform.asciidoc
+++ b/docs/reference/transform/apis/update-transform.asciidoc
@@ -57,7 +57,7 @@ permanent failure.

`<transform_id>`::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-id]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-id]

[[update-transform-query-parms]]
== {api-query-parms-title}
@@ -82,7 +82,7 @@ expires, the request fails and returns an error. Defaults to `30s`.
//Begin dest
`dest`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest]
+
.Properties of `dest`
[%collapsible%open]
@@ -90,12 +90,12 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest]

`index`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-index]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-index]

//Begin aliases
`aliases`:::
(Optional, array of objects)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]
+
.Properties of `aliases`
[%collapsible%open]
@@ -103,53 +103,53 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases]

`alias`::::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-alias]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-alias]

`move_on_creation`::::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-move-on-creation]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-aliases-move-on-creation]
=====
//End aliases

`pipeline`:::
(Optional, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
====
//End dest

`frequency`::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=frequency]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=frequency]

//Begin _meta
`_meta`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
//End _meta

//Begin retention policy
`retention_policy`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention]
+
.Properties of `retention_policy`
[%collapsible%open]
====
`time`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time]
+
.Properties of `time`
[%collapsible%open]
=====
`field`:::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-field]

`max_age`:::
(Required, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-time-max-age]
=====
====
//End retention policy
@@ -157,39 +157,39 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-retention-ti
//Begin settings
`settings`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings]
+
.Properties of `settings`
[%collapsible%open]
====
`align_checkpoints`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-align-checkpoints]
`dates_as_epoch_millis`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-dates-as-epoch-milli]
`deduce_mappings`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-deduce-mappings]
`docs_per_second`:::
(Optional, float)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-docs-per-second]
`max_page_search_size`:::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-max-page-search-size]
`num_failure_retries`:::
(Optional, integer)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-num-failure-retries]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-num-failure-retries]
`unattended`:::
(Optional, boolean)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=transform-settings-unattended]
====
//End settings
//Begin source
`source`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]
+
.Properties of `source`
[%collapsible%open]
@@ -197,18 +197,18 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-transforms]

`index`:::
(Required, string or array)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-index-transforms]

`query`:::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=source-query-transforms]
====
//End source

//Begin sync
`sync`::
(Optional, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync]
+
--
NOTE: You can update these properties only if it is a continuous {transform}. You
@@ -224,7 +224,7 @@ Instead, clone the {transform} in {kib} and add or remove the `sync` property.
//Begin sync.time
`time`:::
(Required, object)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time]
+
.Properties of `time`
[%collapsible%open]
@@ -232,11 +232,11 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time]

`delay`::::
(Optional, <<time-units,time units>>)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-delay]

`field`::::
(Required, string)
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=sync-time-field]
+
--
TIP: In general, it’s a good idea to use a field that contains the
diff --git a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc
index 2d23763a7576b..fe9422d6d4c53 100644
--- a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc
+++ b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc
@@ -6,7 +6,7 @@ allocation settings to lack of disk space.

In order to diagnose the unassigned shards in your deployment use the following steps:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/diagnose-unassigned-shards-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/diagnose-unassigned-shards-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/common-issues/high-cpu-usage.asciidoc b/docs/reference/troubleshooting/common-issues/high-cpu-usage.asciidoc
index 536d18b653580..858683ef97a6d 100644
--- a/docs/reference/troubleshooting/common-issues/high-cpu-usage.asciidoc
+++ b/docs/reference/troubleshooting/common-issues/high-cpu-usage.asciidoc
@@ -15,7 +15,7 @@ depleted, {es} will reject search requests until more threads are available.

**Check CPU usage**

-include::{es-repo-dir}/tab-widgets/cpu-usage-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/cpu-usage-widget.asciidoc[]

**Check hot threads**
diff --git a/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc b/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
index c71dda063dead..e88927f159f21 100644
--- a/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
+++ b/docs/reference/troubleshooting/common-issues/high-jvm-memory-pressure.asciidoc
@@ -12,7 +12,7 @@ exceeds 85%.
**Check JVM memory pressure**

-include::{es-repo-dir}/tab-widgets/jvm-memory-pressure-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/jvm-memory-pressure-widget.asciidoc[]

**Check garbage collection logs**
diff --git a/docs/reference/troubleshooting/data/add-tier.asciidoc b/docs/reference/troubleshooting/data/add-tier.asciidoc
index 830cb2bbe2961..03077d7da1bb3 100644
--- a/docs/reference/troubleshooting/data/add-tier.asciidoc
+++ b/docs/reference/troubleshooting/data/add-tier.asciidoc
@@ -6,7 +6,7 @@ The allocation of indices in an {es} deployment can be allocated on <<data-tiers, data tiers>>
the indices expect to be allocated on to your deployment:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/add-tier-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/add-tier-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/data/data-tiers-mixed-with-node-attr.asciidoc b/docs/reference/troubleshooting/data/data-tiers-mixed-with-node-attr.asciidoc
index 7bf6ef91cce74..2f6b75b372fda 100644
--- a/docs/reference/troubleshooting/data/data-tiers-mixed-with-node-attr.asciidoc
+++ b/docs/reference/troubleshooting/data/data-tiers-mixed-with-node-attr.asciidoc
@@ -12,7 +12,7 @@ This could lead to unassigned shards or shards not transitioning to the desired

In order to fix this follow the next steps:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/data/enable-cluster-allocation.asciidoc b/docs/reference/troubleshooting/data/enable-cluster-allocation.asciidoc
index 787588d2a1656..e229d9016dfe6 100644
--- a/docs/reference/troubleshooting/data/enable-cluster-allocation.asciidoc
+++ b/docs/reference/troubleshooting/data/enable-cluster-allocation.asciidoc
@@ -10,7 +10,7 @@ Forgetting to re-allow all data allocations can lead to unassigned shards.

In order to (re)allow all data to be allocated follow these steps:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/enable-cluster-allocation-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/enable-cluster-allocation-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/data/enable-index-allocation.asciidoc b/docs/reference/troubleshooting/data/enable-index-allocation.asciidoc
index abdc3cade5c62..a4f266ad2d218 100644
--- a/docs/reference/troubleshooting/data/enable-index-allocation.asciidoc
+++ b/docs/reference/troubleshooting/data/enable-index-allocation.asciidoc
@@ -10,7 +10,7 @@ Forgetting to re-allow all data allocation can lead to unassigned shards.

In order to (re)allow all data to be allocated follow these steps:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/enable-index-allocation-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/enable-index-allocation-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/data/increase-cluster-shard-limit.asciidoc b/docs/reference/troubleshooting/data/increase-cluster-shard-limit.asciidoc
index 916677329b2bc..8b8703f9a9dc1 100644
--- a/docs/reference/troubleshooting/data/increase-cluster-shard-limit.asciidoc
+++ b/docs/reference/troubleshooting/data/increase-cluster-shard-limit.asciidoc
@@ -14,7 +14,7 @@ satisfy the configuration.
In order to fix this follow the next steps: -include::{es-repo-dir}/tab-widgets/troubleshooting/data/increase-cluster-shard-limit-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/troubleshooting/data/increase-cluster-shard-limit-widget.asciidoc[] diff --git a/docs/reference/troubleshooting/data/increase-shard-limit.asciidoc b/docs/reference/troubleshooting/data/increase-shard-limit.asciidoc index f97bff73bf108..121b5348ab36a 100644 --- a/docs/reference/troubleshooting/data/increase-shard-limit.asciidoc +++ b/docs/reference/troubleshooting/data/increase-shard-limit.asciidoc @@ -12,7 +12,7 @@ satisfy the index configuration. In order to fix this follow the next steps: -include::{es-repo-dir}/tab-widgets/troubleshooting/data/total-shards-per-node-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/troubleshooting/data/total-shards-per-node-widget.asciidoc[] diff --git a/docs/reference/troubleshooting/data/increase-tier-capacity.asciidoc b/docs/reference/troubleshooting/data/increase-tier-capacity.asciidoc index a413918cee480..362a14c3874db 100644 --- a/docs/reference/troubleshooting/data/increase-tier-capacity.asciidoc +++ b/docs/reference/troubleshooting/data/increase-tier-capacity.asciidoc @@ -15,7 +15,7 @@ replicas, you can influence this behavior by adding more nodes to the cluster In order to fix this follow the next steps: -include::{es-repo-dir}/tab-widgets/troubleshooting/data/increase-tier-capacity-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/troubleshooting/data/increase-tier-capacity-widget.asciidoc[] diff --git a/docs/reference/troubleshooting/data/restore-from-snapshot.asciidoc b/docs/reference/troubleshooting/data/restore-from-snapshot.asciidoc index 95d8d8c4aff8e..4b5de39376860 100644 --- a/docs/reference/troubleshooting/data/restore-from-snapshot.asciidoc +++ b/docs/reference/troubleshooting/data/restore-from-snapshot.asciidoc @@ -9,4 +9,4 @@ contain a copy of the data anymore. IMPORTANT: Restoring the missing data requires you to have a backup of the affected indices and data streams that is up-to-date enough for your use case. Please do not proceed without confirming this. -include::{es-repo-dir}/tab-widgets/troubleshooting/data/restore-from-snapshot-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/troubleshooting/data/restore-from-snapshot-widget.asciidoc[] diff --git a/docs/reference/troubleshooting/data/start-ilm.asciidoc b/docs/reference/troubleshooting/data/start-ilm.asciidoc index 18f1bd4f5353f..1798af814e508 100644 --- a/docs/reference/troubleshooting/data/start-ilm.asciidoc +++ b/docs/reference/troubleshooting/data/start-ilm.asciidoc @@ -5,7 +5,7 @@ Automatic index lifecycle and data retention management is currently disabled. In order to start the automatic {ilm} service, follow these steps: -include::{es-repo-dir}/tab-widgets/troubleshooting/data/start-ilm-widget.asciidoc[] +include::{es-ref-dir}/tab-widgets/troubleshooting/data/start-ilm-widget.asciidoc[] diff --git a/docs/reference/troubleshooting/data/start-slm.asciidoc b/docs/reference/troubleshooting/data/start-slm.asciidoc index 7b5e57cfbf6ce..66264e8ac1b2d 100644 --- a/docs/reference/troubleshooting/data/start-slm.asciidoc +++ b/docs/reference/troubleshooting/data/start-slm.asciidoc @@ -6,7 +6,7 @@ snapshots will not be created automatically. 
In order to start the snapshot lifecycle management service, follow these steps:

-include::{es-repo-dir}/tab-widgets/troubleshooting/data/start-slm-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/data/start-slm-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/disk/fix-data-node-out-of-disk.asciidoc b/docs/reference/troubleshooting/disk/fix-data-node-out-of-disk.asciidoc
index b98b78582900c..67541cb036bf3 100644
--- a/docs/reference/troubleshooting/disk/fix-data-node-out-of-disk.asciidoc
+++ b/docs/reference/troubleshooting/disk/fix-data-node-out-of-disk.asciidoc
@@ -11,7 +11,7 @@ of two ways:
[[increase-capacity-data-node]]
=== Increase the disk capacity of data nodes
-include::{es-repo-dir}/tab-widgets/troubleshooting/disk/increase-data-node-capacity-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/disk/increase-data-node-capacity-widget.asciidoc[]

[[decrease-disk-usage-data-node]]
=== Decrease the disk usage of data nodes
@@ -20,4 +20,4 @@ In order to decrease the disk usage in your cluster without losing any data, you

NOTE: Reducing the replicas of an index can potentially reduce search throughput and data redundancy.
However, it can quickly give the cluster breathing room until a more permanent solution is in place.

-include::{es-repo-dir}/tab-widgets/troubleshooting/disk/decrease-data-node-disk-usage-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/disk/decrease-data-node-disk-usage-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/disk/fix-master-node-out-of-disk.asciidoc b/docs/reference/troubleshooting/disk/fix-master-node-out-of-disk.asciidoc
index 6a32ab8aff375..73268a0f433e8 100644
--- a/docs/reference/troubleshooting/disk/fix-master-node-out-of-disk.asciidoc
+++ b/docs/reference/troubleshooting/disk/fix-master-node-out-of-disk.asciidoc
@@ -5,4 +5,4 @@ out of space, you need to ensure that they have enough disk space to function.

If the <<health-api,health API>> reports that your master node is out of space you need to increase the disk capacity of your master nodes.

-include::{es-repo-dir}/tab-widgets/troubleshooting/disk/increase-master-node-capacity-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/disk/increase-master-node-capacity-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/disk/fix-other-node-out-of-disk.asciidoc b/docs/reference/troubleshooting/disk/fix-other-node-out-of-disk.asciidoc
index d53e5337fad8c..9334e0f17fd8d 100644
--- a/docs/reference/troubleshooting/disk/fix-other-node-out-of-disk.asciidoc
+++ b/docs/reference/troubleshooting/disk/fix-other-node-out-of-disk.asciidoc
@@ -6,4 +6,4 @@ for example machine learning. If one or more of these nodes are running out of s
enough disk space to function. If the <<health-api,health API>> reports that a node that is not a master and does not contain data is out of space
you need to increase the disk capacity of this node.

-include::{es-repo-dir}/tab-widgets/troubleshooting/disk/increase-other-node-capacity-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/disk/increase-other-node-capacity-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
index 0de4667bd9688..386c2561c03c6 100644
--- a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
+++ b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
@@ -16,7 +16,7 @@ guide.
To remedy the situation mark the repository as read-only or remove it from all the
other deployments, and re-add (recreate) the repository in the current deployment:

-include::{es-repo-dir}/tab-widgets/troubleshooting/snapshot/corrupt-repository-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/snapshot/corrupt-repository-widget.asciidoc[]

[[diagnosing-unknown-repositories]]
diff --git a/docs/reference/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc b/docs/reference/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc
index 4084fdc7ca42c..2496781c0c8f4 100644
--- a/docs/reference/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc
+++ b/docs/reference/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc
@@ -12,7 +12,7 @@ repeated failures before reporting a warning is controlled by the

In the event that an automated {slm} policy execution is experiencing repeated failures, follow these steps to get more information about the problem:

-include::{es-repo-dir}/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures-widget.asciidoc[]
diff --git a/docs/reference/troubleshooting/troubleshooting-shards-capacity.asciidoc b/docs/reference/troubleshooting/troubleshooting-shards-capacity.asciidoc
index 6a3a5bc06ce85..bc8fb7290f1ed 100644
--- a/docs/reference/troubleshooting/troubleshooting-shards-capacity.asciidoc
+++ b/docs/reference/troubleshooting/troubleshooting-shards-capacity.asciidoc
@@ -7,4 +7,4 @@ The current shards capacity of the cluster is available in the <<health-api,health API>>.

-include::{es-repo-dir}/tab-widgets/troubleshooting/troubleshooting-shards-capacity-widget.asciidoc[]
+include::{es-ref-dir}/tab-widgets/troubleshooting/troubleshooting-shards-capacity-widget.asciidoc[]
diff --git a/docs/reference/upgrade.asciidoc b/docs/reference/upgrade.asciidoc
index 8f6d095971ff1..d5057d9b87d85 100644
--- a/docs/reference/upgrade.asciidoc
+++ b/docs/reference/upgrade.asciidoc
@@ -52,7 +52,7 @@ the REST API.
[[upgrade-fips-java17]] === FIPS Compliance and Java 17 -include::{es-repo-dir}/security/fips-java17.asciidoc[] +include::{es-ref-dir}/security/fips-java17.asciidoc[] include::upgrade/archived-settings.asciidoc[] From dbb700c67950c60372a268c8e228c0cefb5ca2b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Wed, 17 Apr 2024 14:39:50 +0200 Subject: [PATCH 074/130] Fix and unmute "Test frequent item sets unsupported types" yml test (#107350) --- .../rest-api-spec/test/ml/frequent_item_sets_agg.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml index db41e0d0efaa1..bc44e8423178c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/frequent_item_sets_agg.yml @@ -7,6 +7,9 @@ setup: indices.create: index: store body: + settings: + number_of_shards: 1 + number_of_replicas: 0 mappings: properties: features: @@ -433,9 +436,6 @@ setup: --- "Test frequent item sets unsupported types": - - skip: - version: "all" - reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/106215" - do: catch: /Field \[geo_point\] of type \[geo_point\] is not supported for aggregation \[frequent_item_sets\]/ search: From adaa4763f3583ea32716a7d9066e347260e15ca4 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 17 Apr 2024 16:04:51 +0200 Subject: [PATCH 075/130] ESQL: Fix missing refs due to pruning renamed grouping columns (#107328) Sometimes, CombineProjections does not correctly update an aggregation's groupings when combining with a preceding projection. Fix this by resolving any aliases used in the groupings and de-duplicating them. 
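For instance, a query of the following shape (exercised by the new
`evalMultipleOverridingKeysWithAggregateExpr` csv-spec test added in this
patch) groups by columns that are plain renames of `languages` and then
drops one of them:

    FROM employees
    | EVAL k = languages, k1 = k
    | STATS c = 3*COUNT() BY languages, k, k1, languages
    | DROP k
    | SORT languages

CombineProjections now resolves `k` and `k1` back to the underlying
`languages` attribute and de-duplicates the resulting groupings, so the
aggregation no longer references the pruned columns.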
--------- Co-authored-by: Andrei Stefan --- docs/changelog/107328.yaml | 7 +++ .../src/main/resources/stats.csv-spec | 17 ++++++ .../esql/optimizer/LogicalPlanOptimizer.java | 38 +++++++++++- .../optimizer/LogicalPlanOptimizerTests.java | 59 +++++++++++++------ 4 files changed, 103 insertions(+), 18 deletions(-) create mode 100644 docs/changelog/107328.yaml diff --git a/docs/changelog/107328.yaml b/docs/changelog/107328.yaml new file mode 100644 index 0000000000000..a608d7567ddef --- /dev/null +++ b/docs/changelog/107328.yaml @@ -0,0 +1,7 @@ +pr: 107328 +summary: "ESQL: Fix missing refs due to pruning renamed grouping columns" +area: ES|QL +type: bug +issues: + - 107083 + - 107166 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 749c44d1f6ece..113124d3a72a8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1586,6 +1586,23 @@ c:l | k1:i | languages:i 10 | null | null ; +evalMultipleOverridingKeysWithAggregateExpr#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| EVAL k = languages, k1 = k +| STATS c = 3*COUNT() BY languages, k, k1, languages +| DROP k +| SORT languages +; + +c:l | k1:i | languages:i +45 | 1 | 1 +57 | 2 | 2 +51 | 3 | 3 +54 | 4 | 4 +63 | 5 | 5 +30 | null | null +; + minWithSortExpression1#[skip:-8.13.99,reason:supported in 8.14] FROM employees | STATS min = min(salary) by languages | SORT min + languages; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 2aaf34a1dd1d8..6dd9bc1b74f57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -389,6 +389,7 @@ static class CombineProjections extends OptimizerRules.OptimizerRule } @Override + @SuppressWarnings("unchecked") protected LogicalPlan rule(UnaryPlan plan) { LogicalPlan child = plan.child(); @@ -419,7 +420,22 @@ protected LogicalPlan rule(UnaryPlan plan) { // Agg with underlying Project (group by on sub-queries) if (plan instanceof Aggregate a) { if (child instanceof Project p) { - plan = new Aggregate(a.source(), p.child(), a.groupings(), combineProjections(a.aggregates(), p.projections())); + var groupings = a.groupings(); + List groupingAttrs = new ArrayList<>(a.groupings().size()); + for (Expression grouping : groupings) { + if (grouping instanceof Attribute attribute) { + groupingAttrs.add(attribute); + } else { + // After applying ReplaceStatsNestedExpressionWithEval, groupings can only contain attributes. + throw new EsqlIllegalArgumentException("Expected an Attribute, got {}", grouping); + } + } + plan = new Aggregate( + a.source(), + p.child(), + combineUpperGroupingsAndLowerProjections(groupingAttrs, p.projections()), + combineProjections(a.aggregates(), p.projections()) + ); } } @@ -482,6 +498,26 @@ private static List combineProjections( return replaced; } + private static List combineUpperGroupingsAndLowerProjections( + List upperGroupings, + List lowerProjections + ) { + // Collect the alias map for resolving the source (f1 = 1, f2 = f1, etc..) 
+            AttributeMap<Attribute> aliases = new AttributeMap<>();
+            for (NamedExpression ne : lowerProjections) {
+                // Projections are just aliases for attributes, so casting is safe.
+                aliases.put(ne.toAttribute(), (Attribute) Alias.unwrap(ne));
+            }
+
+            // Replace any matching attribute directly with the aliased attribute from the projection.
+            AttributeSet replaced = new AttributeSet();
+            for (Attribute attr : upperGroupings) {
+                // All substitutions happen before; groupings must be attributes at this point.
+                replaced.add(aliases.resolve(attr, attr));
+            }
+            return new ArrayList<>(replaced);
+        }
+
     /**
      * Replace grouping alias previously contained in the aggregations that might have been projected away.
      */
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
index 579a998755eb7..ba4f15533d26d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
@@ -417,8 +417,8 @@ public void testCombineProjectionWithAggregation() {
     /**
      * Expects
      * Limit[1000[INTEGER]]
-     * \_Aggregate[[last_name{f}#23, first_name{f}#20, k{r}#4],[SUM(salary{f}#24) AS s, last_name{f}#23, first_name{f}#20, first_n
-     * ame{f}#20 AS k]]
+     * \_Aggregate[[last_name{f}#23, first_name{f}#20],[SUM(salary{f}#24) AS s, last_name{f}#23, first_name{f}#20, first_name{f}#2
+     * 0 AS k]]
      * \_EsRelation[test][_meta_field{f}#25, emp_no{f}#19, first_name{f}#20, ..]
      */
     public void testCombineProjectionWithAggregationAndEval() {
@@ -432,7 +432,7 @@ public void testCombineProjectionWithAggregationAndEval() {
         var limit = as(plan, Limit.class);
         var agg = as(limit.child(), Aggregate.class);
         assertThat(Expressions.names(agg.aggregates()), contains("s", "last_name", "first_name", "k"));
-        assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name", "k"));
+        assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name"));
     }
 
     /**
@@ -552,6 +552,12 @@ public void testCombineDisjunctionToInFromIn() {
         assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER))));
     }
 
+    /**
+     * Expects
+     * Limit[1000[INTEGER]]
+     * \_Aggregate[[first_name{f}#12],[COUNT(salary{f}#16) AS count(salary), first_name{f}#12 AS x]]
+     * \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..]
+ */ public void testCombineProjectionWithPruning() { var plan = plan(""" from test @@ -563,19 +569,17 @@ public void testCombineProjectionWithPruning() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.aggregates()), contains("count(salary)", "x")); - assertThat(Expressions.names(agg.groupings()), contains("x")); + assertThat(Expressions.names(agg.groupings()), contains("first_name")); var alias = as(agg.aggregates().get(1), Alias.class); var field = as(alias.child(), FieldAttribute.class); assertThat(field.name(), is("first_name")); - var group = as(agg.groupings().get(0), Attribute.class); - assertThat(group, is(alias.toAttribute())); var from = as(agg.child(), EsRelation.class); } /** * Expects * Limit[1000[INTEGER]] - * \_Aggregate[[f{r}#7],[SUM(emp_no{f}#15) AS s, COUNT(first_name{f}#16) AS c, first_name{f}#16 AS f]] + * \_Aggregate[[first_name{f}#16],[SUM(emp_no{f}#15) AS s, COUNT(first_name{f}#16) AS c, first_name{f}#16 AS f]] * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] */ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg() { @@ -599,13 +603,13 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUsedInAgg as = as(aggs.get(2), Alias.class); assertThat(Expressions.name(as.child()), is("first_name")); - assertThat(Expressions.names(agg.groupings()), contains("f")); + assertThat(Expressions.names(agg.groupings()), contains("first_name")); } /** * Expects * Limit[1000[INTEGER]] - * \_Aggregate[[f{r}#7],[SUM(emp_no{f}#15) AS s, first_name{f}#16 AS f]] + * \_Aggregate[[first_name{f}#16],[SUM(emp_no{f}#15) AS s, first_name{f}#16 AS f]] * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] */ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUnused() { @@ -625,7 +629,7 @@ public void testCombineProjectionWithAggregationFirstAndAliasedGroupingUnused() as = as(aggs.get(1), Alias.class); assertThat(Expressions.name(as.child()), is("first_name")); - assertThat(Expressions.names(agg.groupings()), contains("f")); + assertThat(Expressions.names(agg.groupings()), contains("first_name")); } /** @@ -2786,6 +2790,27 @@ public void testEliminateDuplicateAggsNonCount() { var source = as(agg.child(), EsRelation.class); } + /** + * Expects + * Limit[1000[INTEGER]] + * \_Aggregate[[salary{f}#12],[salary{f}#12, salary{f}#12 AS x]] + * \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ */ + public void testEliminateDuplicateRenamedGroupings() { + var plan = plan(""" + from test + | eval x = salary + | stats by salary, x + """); + + var limit = as(plan, Limit.class); + var agg = as(limit.child(), Aggregate.class); + var relation = as(agg.child(), EsRelation.class); + + assertThat(Expressions.names(agg.groupings()), contains("salary")); + assertThat(Expressions.names(agg.aggregates()), contains("salary", "x")); + } + /** * Expected * Limit[2[INTEGER]] @@ -2832,7 +2857,7 @@ public void testRenameStatsDropGroup() { /** * Expected * Limit[1000[INTEGER]] - * \_Aggregate[[a{r}#2, bar{r}#8],[COUNT([2a][KEYWORD]) AS baz, b{r}#4 AS bar]] + * \_Aggregate[[a{r}#3, b{r}#5],[COUNT([2a][KEYWORD]) AS baz, b{r}#5 AS bar]] * \_Row[[1[INTEGER] AS a, 2[INTEGER] AS b]] */ public void testMultipleRenameStatsDropGroup() { @@ -2844,15 +2869,15 @@ public void testMultipleRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); - assertThat(Expressions.names(agg.groupings()), contains("a", "bar")); + assertThat(Expressions.names(agg.groupings()), contains("a", "b")); var row = as(agg.child(), Row.class); } /** * Expected * Limit[1000[INTEGER]] - * \_Aggregate[[emp_no{f}#11, bar{r}#4],[MAX(salary{f}#16) AS baz, gender{f}#13 AS bar]] - * \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + * \_Aggregate[[emp_no{f}#14, gender{f}#16],[MAX(salary{f}#19) AS baz, gender{f}#16 AS bar]] + * \_EsRelation[test][_meta_field{f}#20, emp_no{f}#14, first_name{f}#15, ..] */ public void testMultipleRenameStatsDropGroupMultirow() { LogicalPlan plan = optimizedPlan(""" @@ -2863,7 +2888,7 @@ public void testMultipleRenameStatsDropGroupMultirow() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); - assertThat(Expressions.names(agg.groupings()), contains("emp_no", "bar")); + assertThat(Expressions.names(agg.groupings()), contains("emp_no", "gender")); var row = as(agg.child(), EsRelation.class); } @@ -2944,7 +2969,7 @@ public void testIsNotNullConstraintForStatsWithAndOnGrouping() { /** * Expects * Limit[1000[INTEGER]] - * \_Aggregate[[x{r}#4],[SUM(salary{f}#13) AS sum(salary), salary{f}#13 AS x]] + * \_Aggregate[[salary{f}#13],[SUM(salary{f}#13) AS sum(salary), salary{f}#13 AS x]] * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
      */
     public void testIsNotNullConstraintForStatsWithAndOnGroupingAlias() {
@@ -2956,7 +2981,7 @@ public void testIsNotNullConstraintForStatsWithAndOnGroupingAlias() {
 
         var limit = as(plan, Limit.class);
         var agg = as(limit.child(), Aggregate.class);
-        assertThat(Expressions.names(agg.groupings()), contains("x"));
+        assertThat(Expressions.names(agg.groupings()), contains("salary"));
         assertThat(Expressions.names(agg.aggregates()), contains("sum(salary)", "x"));
         var from = as(agg.child(), EsRelation.class);
     }

From 3df8afbafc7f13c523f37cc68a296b2be8a03b9f Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 17 Apr 2024 15:06:56 +0100
Subject: [PATCH 076/130] AwaitsFix for #107568

---
 .../compute/operator/TimeSeriesAggregationOperatorTests.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
index 15c2cb1c57218..6fe19d6eb0e6d 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java
@@ -265,6 +265,7 @@ public void testBasicRateOrdinalBased() {
     // TODO: in a follow up add support for ordinal based time series grouping operator
     // (and then remove this test)
     // (ordinal based can only group by one field and never includes timestamp)
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107568")
     public void testRandomRateOrdinalBased() {
         int numPods = between(1, 10);
         List<Pod> pods = new ArrayList<>();

From eb6af0e6b549d0810d8b89791556bc791b766652 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Wed, 17 Apr 2024 15:29:22 +0100
Subject: [PATCH 077/130] Refactor PathTrie to tidy it up (#107542)

---
 .../elasticsearch/common/path/PathTrie.java   | 138 +++++++-----------
 .../elasticsearch/rest/RestController.java    |   2 +-
 .../org/elasticsearch/rest/RestUtils.java     |   4 +-
 .../common/path/PathTrieTests.java            |  93 ++++--------
 4 files changed, 88 insertions(+), 149 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/common/path/PathTrie.java b/server/src/main/java/org/elasticsearch/common/path/PathTrie.java
index 306304ab016a4..bec7dea645027 100644
--- a/server/src/main/java/org/elasticsearch/common/path/PathTrie.java
+++ b/server/src/main/java/org/elasticsearch/common/path/PathTrie.java
@@ -10,13 +10,15 @@
 
 import org.elasticsearch.common.collect.Iterators;
 
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.function.BiFunction;
+import java.util.function.BinaryOperator;
 import java.util.function.Supplier;
+import java.util.function.UnaryOperator;
+import java.util.stream.Stream;
 
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
@@ -50,52 +52,43 @@ enum TrieMatchingMode {
         TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED
     );
 
-    public interface Decoder {
-        String decode(String value);
-    }
-
-    private final Decoder decoder;
+    private final UnaryOperator<String> decoder;
     private final TrieNode root;
     private T rootValue;
 
     private static final String SEPARATOR = "/";
     private static final String WILDCARD = "*";
 
-    public PathTrie(Decoder decoder) {
+    public PathTrie(UnaryOperator<String> decoder) {
         this.decoder = decoder;
-        root = new TrieNode(SEPARATOR, null, WILDCARD);
+        root = new TrieNode(SEPARATOR, null);
     }
 
-    public class TrieNode {
-        private transient String key;
-        private transient T value;
-        private final String wildcard;
-
-        private transient String namedWildcard;
-
+    private class TrieNode {
+        private T value;
+        private String namedWildcard;
         private Map<String, TrieNode> children;
 
-        private TrieNode(String key, T value, String wildcard) {
-            this.key = key;
-            this.wildcard = wildcard;
+        private TrieNode(String key, T value) {
             this.value = value;
             this.children = emptyMap();
             if (isNamedWildcard(key)) {
-                namedWildcard = key.substring(key.indexOf('{') + 1, key.indexOf('}'));
+                updateNamedWildcard(key);
             } else {
                 namedWildcard = null;
             }
         }
 
-        private void updateKeyWithNamedWildcard(String key) {
-            this.key = key;
-            String newNamedWildcard = key.substring(key.indexOf('{') + 1, key.indexOf('}'));
-            if (namedWildcard != null && newNamedWildcard.equals(namedWildcard) == false) {
-                throw new IllegalArgumentException(
-                    "Trying to use conflicting wildcard names for same path: " + namedWildcard + " and " + newNamedWildcard
-                );
+        private void updateNamedWildcard(String key) {
+            String newNamedWildcard = key.substring(1, key.length() - 1);
+            if (newNamedWildcard.equals(namedWildcard) == false) {
+                if (namedWildcard != null) {
+                    throw new IllegalArgumentException(
+                        "Trying to use conflicting wildcard names for same path: " + namedWildcard + " and " + newNamedWildcard
+                    );
+                }
+                namedWildcard = newNamedWildcard;
             }
-            namedWildcard = newNamedWildcard;
         }
 
         private void addInnerChild(String key, TrieNode child) {
@@ -110,16 +103,17 @@ private synchronized void insert(String[] path, int index, T value) {
             String token = path[index];
             String key = token;
             if (isNamedWildcard(token)) {
-                key = wildcard;
+                key = WILDCARD;
             }
+
             TrieNode node = children.get(key);
             if (node == null) {
                 T nodeValue = index == path.length - 1 ? value : null;
-                node = new TrieNode(token, nodeValue, wildcard);
+                node = new TrieNode(token, nodeValue);
                 addInnerChild(key, node);
             } else {
                 if (isNamedWildcard(token)) {
-                    node.updateKeyWithNamedWildcard(token);
+                    node.updateNamedWildcard(token);
                 }
                 /*
                  * If the target node already exists, but is without a value,
@@ -139,22 +133,23 @@ private synchronized void insert(String[] path, int index, T value) {
             node.insert(path, index + 1, value);
         }
 
-        private synchronized void insertOrUpdate(String[] path, int index, T value, BiFunction<T, T, T> updater) {
+        private synchronized void insertOrUpdate(String[] path, int index, T value, BinaryOperator<T> updater) {
             if (index >= path.length) return;
 
             String token = path[index];
             String key = token;
             if (isNamedWildcard(token)) {
-                key = wildcard;
+                key = WILDCARD;
             }
+
             TrieNode node = children.get(key);
             if (node == null) {
                 T nodeValue = index == path.length - 1 ? value : null;
-                node = new TrieNode(token, nodeValue, wildcard);
+                node = new TrieNode(token, nodeValue);
                 addInnerChild(key, node);
             } else {
                 if (isNamedWildcard(token)) {
-                    node.updateKeyWithNamedWildcard(token);
+                    node.updateNamedWildcard(token);
                 }
                 /*
                  * If the target node already exists, but is without a value,
@@ -173,7 +168,7 @@ private synchronized void insertOrUpdate(String[] path, int index, T value, BiFu
         }
 
         private static boolean isNamedWildcard(String key) {
-            return key.indexOf('{') != -1 && key.indexOf('}') != -1;
+            return key.charAt(0) == '{' && key.charAt(key.length() - 1) == '}';
         }
 
         private String namedWildcard() {
@@ -184,7 +179,7 @@ private boolean isNamedWildcard() {
             return namedWildcard != null;
         }
 
-        public T retrieve(String[] path, int index, Map<String, String> params, TrieMatchingMode trieMatchingMode) {
+        private T retrieve(String[] path, int index, Map<String, String> params, TrieMatchingMode trieMatchingMode) {
             if (index >= path.length) return null;
 
             String token = path[index];
@@ -193,7 +188,7 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
 
             if (node == null) {
                 if (trieMatchingMode == TrieMatchingMode.WILDCARD_NODES_ALLOWED) {
-                    node = children.get(wildcard);
+                    node = children.get(WILDCARD);
                     if (node == null) {
                         return null;
                     }
@@ -202,7 +197,7 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
                     /*
                      * Allow root node wildcard matches.
                      */
-                    node = children.get(wildcard);
+                    node = children.get(WILDCARD);
                     if (node == null) {
                         return null;
                     }
@@ -211,7 +206,7 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
                    /*
                     * Allow leaf node wildcard matches.
                     */
-                    node = children.get(wildcard);
+                    node = children.get(WILDCARD);
                    if (node == null) {
                        return null;
                    }
@@ -220,32 +215,33 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
                     return null;
                 }
             } else {
+                TrieNode wildcardNode;
                 if (index + 1 == path.length
                     && node.value == null
-                    && children.get(wildcard) != null
-                    && EXPLICIT_OR_ROOT_WILDCARD.contains(trieMatchingMode) == false) {
+                    && EXPLICIT_OR_ROOT_WILDCARD.contains(trieMatchingMode) == false
+                    && (wildcardNode = children.get(WILDCARD)) != null) {
                     /*
                      * If we are at the end of the path, the current node does not have a value but
                      * there is a child wildcard node, use the child wildcard node.
                      */
-                    node = children.get(wildcard);
+                    node = wildcardNode;
                     usedWildcard = true;
                 } else if (index == 1
                     && node.value == null
-                    && children.get(wildcard) != null
-                    && trieMatchingMode == TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED) {
+                    && trieMatchingMode == TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED
+                    && (wildcardNode = children.get(WILDCARD)) != null) {
                     /*
                      * If we are at the root, and root wildcards are allowed, use the child wildcard
                      * node.
                      */
-                    node = children.get(wildcard);
+                    node = wildcardNode;
                     usedWildcard = true;
                 } else {
-                    usedWildcard = token.equals(wildcard);
+                    usedWildcard = token.equals(WILDCARD);
                 }
             }
 
-            put(params, node, token);
+            recordWildcardParam(params, node, token);
 
             if (index == (path.length - 1)) {
                 return node.value;
@@ -253,9 +249,9 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
             T nodeValue = node.retrieve(path, index + 1, params, trieMatchingMode);
             if (nodeValue == null && usedWildcard == false && trieMatchingMode != TrieMatchingMode.EXPLICIT_NODES_ONLY) {
-                node = children.get(wildcard);
+                node = children.get(WILDCARD);
                 if (node != null) {
-                    put(params, node, token);
+                    recordWildcardParam(params, node, token);
                     nodeValue = node.retrieve(path, index + 1, params, trieMatchingMode);
                 }
             }
@@ -263,13 +259,13 @@ public T retrieve(String[] path, int index, Map<String, String> params, TrieMatc
             return nodeValue;
         }
 
-        private void put(Map<String, String> params, TrieNode node, String value) {
+        private void recordWildcardParam(Map<String, String> params, TrieNode node, String value) {
             if (params != null && node.isNamedWildcard()) {
-                params.put(node.namedWildcard(), decoder.decode(value));
+                params.put(node.namedWildcard(), decoder.apply(value));
             }
         }
 
-        Iterator<T> allNodeValues() {
+        private Iterator<T> allNodeValues() {
             final Iterator<T> childrenIterator = Iterators.flatMap(children.values().iterator(), TrieNode::allNodeValues);
             if (value == null) {
                 return childrenIterator;
@@ -277,11 +273,6 @@ Iterator<T> allNodeValues() {
                 return Iterators.concat(Iterators.single(value), childrenIterator);
             }
         }
-
-        @Override
-        public String toString() {
-            return key;
-        }
     }
 
     public void insert(String path, T value) {
@@ -308,7 +299,7 @@ public void insert(String path, T value) {
      *
      * allowing the value to be updated if desired.
      */
-    public void insertOrUpdate(String path, T value, BiFunction<T, T, T> updater) {
+    public void insertOrUpdate(String path, T value, BinaryOperator<T> updater) {
         String[] strings = path.split(SEPARATOR);
         if (strings.length == 0) {
             if (rootValue != null) {
@@ -334,8 +325,8 @@ public T retrieve(String path, Map<String, String> params) {
         return retrieve(path, params, TrieMatchingMode.WILDCARD_NODES_ALLOWED);
     }
 
-    public T retrieve(String path, Map<String, String> params, TrieMatchingMode trieMatchingMode) {
-        if (path.length() == 0) {
+    T retrieve(String path, Map<String, String> params, TrieMatchingMode trieMatchingMode) {
+        if (path.isEmpty()) {
             return rootValue;
         }
         String[] strings = path.split(SEPARATOR);
@@ -353,29 +344,12 @@ public T retrieve(String path, Map<String, String> params, TrieMatchingMode trie
     }
 
     /**
-     * Returns an iterator of the objects stored in the {@code PathTrie}, using
+     * Returns a stream of the objects stored in the {@code PathTrie}, using
      * all possible {@code TrieMatchingMode} modes. The {@code paramSupplier}
-     * is called between each invocation of {@code next()} to supply a new map
-     * of parameters.
+     * is called for each mode to supply a new map of parameters.
     */
-    public Iterator<T> retrieveAll(String path, Supplier<Map<String, String>> paramSupplier) {
-        return new Iterator<>() {
-
-            private int mode;
-
-            @Override
-            public boolean hasNext() {
-                return mode < TrieMatchingMode.values().length;
-            }
-
-            @Override
-            public T next() {
-                if (hasNext() == false) {
-                    throw new NoSuchElementException("called next() without validating hasNext()! no more modes available");
-                }
-                return retrieve(path, paramSupplier.get(), TrieMatchingMode.values()[mode++]);
-            }
-        };
+    public Stream<T> retrieveAll(String path, Supplier<Map<String, String>> paramSupplier) {
+        return Arrays.stream(TrieMatchingMode.values()).map(m -> retrieve(path, paramSupplier.get(), m));
     }
 
     public Iterator<T> allNodeValues() {
diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java
index d197fe50d60d5..8ce9b08eba205 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestController.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestController.java
@@ -644,7 +644,7 @@ Iterator<MethodHandlers> getAllHandlers(@Nullable Map<String, String> requestPar
         // we use rawPath since we don't want to decode it while processing the path resolution
         // so we can handle things like:
         // my_index/my_type/http%3A%2F%2Fwww.google.com
-        return handlers.retrieveAll(rawPath, paramsSupplier);
+        return handlers.retrieveAll(rawPath, paramsSupplier).iterator();
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/rest/RestUtils.java b/server/src/main/java/org/elasticsearch/rest/RestUtils.java
index aa693f38a3e6d..4aa82f5e4b7c5 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestUtils.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestUtils.java
@@ -9,7 +9,6 @@
 package org.elasticsearch.rest;
 
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.path.PathTrie;
 import org.elasticsearch.core.Booleans;
 
 import java.nio.charset.Charset;
@@ -17,6 +16,7 @@
 import java.util.Arrays;
 import java.util.Map;
 import java.util.Optional;
+import java.util.function.UnaryOperator;
 import java.util.regex.Pattern;
 
 import static org.elasticsearch.rest.RestRequest.PATH_RESTRICTED;
@@ -28,7 +28,7 @@ public class RestUtils {
      */
     private static final boolean DECODE_PLUS_AS_SPACE = Booleans.parseBoolean(System.getProperty("es.rest.url_plus_as_space", "false"));
 
-    public static final PathTrie.Decoder REST_DECODER = RestUtils::decodeComponent;
+    public static final UnaryOperator<String> REST_DECODER = RestUtils::decodeComponent;
 
     public static void decodeQueryString(String s, int fromIndex, Map<String, String> params) {
         if (fromIndex < 0) {
diff --git a/server/src/test/java/org/elasticsearch/common/path/PathTrieTests.java b/server/src/test/java/org/elasticsearch/common/path/PathTrieTests.java
index 6b2e0122a599a..39eaf0bb8c57e 100644
--- a/server/src/test/java/org/elasticsearch/common/path/PathTrieTests.java
+++ b/server/src/test/java/org/elasticsearch/common/path/PathTrieTests.java
@@ -13,20 +13,16 @@
 import org.elasticsearch.test.ESTestCase;
 
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.Map;
+import java.util.function.UnaryOperator;
 
+import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.nullValue;
 
 public class PathTrieTests extends ESTestCase {
 
-    public static final PathTrie.Decoder NO_DECODER = new PathTrie.Decoder() {
-        @Override
-        public String decode(String value) {
-            return value;
-        }
-    };
+    public static final UnaryOperator<String> NO_DECODER = UnaryOperator.identity();
 
     public void testPath() {
         PathTrie<String> trie = new PathTrie<>(NO_DECODER);
@@ -50,9 +46,7 @@ public void testPath() {
 
         Map<String, String> params = new HashMap<>();
         assertThat(trie.retrieve("index1/insert/12", params), equalTo("bingo"));
-        assertThat(params.size(), equalTo(2));
-        assertThat(params.get("index"), equalTo("index1"));
-        assertThat(params.get("docId"), equalTo("12"));
+        assertThat(params, equalTo(Map.of("index", "index1", "docId", "12")));
"index1", "docId", "12"))); } public void testEmptyPath() { @@ -68,11 +62,11 @@ public void testDifferentNamesOnDifferentPath() { Map params = new HashMap<>(); assertThat(trie.retrieve("/a/test", params), equalTo("test1")); - assertThat(params.get("type"), equalTo("test")); + assertThat(params, equalTo(Map.of("type", "test"))); params.clear(); assertThat(trie.retrieve("/b/testX", params), equalTo("test2")); - assertThat(params.get("name"), equalTo("testX")); + assertThat(params, equalTo(Map.of("name", "testX"))); } public void testSameNameOnDifferentPath() { @@ -82,11 +76,11 @@ public void testSameNameOnDifferentPath() { Map params = new HashMap<>(); assertThat(trie.retrieve("/a/c/test", params), equalTo("test1")); - assertThat(params.get("name"), equalTo("test")); + assertThat(params, equalTo(Map.of("name", "test"))); params.clear(); assertThat(trie.retrieve("/b/testX", params), equalTo("test2")); - assertThat(params.get("name"), equalTo("testX")); + assertThat(params, equalTo(Map.of("name", "testX"))); } public void testPreferNonWildcardExecution() { @@ -125,56 +119,31 @@ public void testWildcardMatchingModes() { assertThat(trie.retrieve("/a", params, TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED), equalTo("test1")); assertThat(trie.retrieve("/a", params, TrieMatchingMode.WILDCARD_LEAF_NODES_ALLOWED), equalTo("test1")); assertThat(trie.retrieve("/a", params, TrieMatchingMode.WILDCARD_NODES_ALLOWED), equalTo("test1")); - Iterator allPaths = trie.retrieveAll("/a", () -> params); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo("test1")); - assertThat(allPaths.next(), equalTo("test1")); - assertThat(allPaths.next(), equalTo("test1")); - assertFalse(allPaths.hasNext()); + assertThat(trie.retrieveAll("/a", () -> params).toList(), contains(null, "test1", "test1", "test1")); assertThat(trie.retrieve("/a/b", params, TrieMatchingMode.EXPLICIT_NODES_ONLY), nullValue()); assertThat(trie.retrieve("/a/b", params, TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED), equalTo("test4")); assertThat(trie.retrieve("/a/b", params, TrieMatchingMode.WILDCARD_LEAF_NODES_ALLOWED), equalTo("test3")); assertThat(trie.retrieve("/a/b", params, TrieMatchingMode.WILDCARD_NODES_ALLOWED), equalTo("test3")); - allPaths = trie.retrieveAll("/a/b", () -> params); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo("test4")); - assertThat(allPaths.next(), equalTo("test3")); - assertThat(allPaths.next(), equalTo("test3")); - assertFalse(allPaths.hasNext()); + assertThat(trie.retrieveAll("/a/b", () -> params).toList(), contains(null, "test4", "test3", "test3")); assertThat(trie.retrieve("/a/b/c", params, TrieMatchingMode.EXPLICIT_NODES_ONLY), nullValue()); assertThat(trie.retrieve("/a/b/c", params, TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED), equalTo("test5")); assertThat(trie.retrieve("/a/b/c", params, TrieMatchingMode.WILDCARD_LEAF_NODES_ALLOWED), equalTo("test7")); assertThat(trie.retrieve("/a/b/c", params, TrieMatchingMode.WILDCARD_NODES_ALLOWED), equalTo("test7")); - allPaths = trie.retrieveAll("/a/b/c", () -> params); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo("test5")); - assertThat(allPaths.next(), equalTo("test7")); - assertThat(allPaths.next(), equalTo("test7")); - assertFalse(allPaths.hasNext()); + assertThat(trie.retrieveAll("/a/b/c", () -> params).toList(), contains(null, "test5", "test7", "test7")); assertThat(trie.retrieve("/x/y/z", params, TrieMatchingMode.EXPLICIT_NODES_ONLY), nullValue()); 
assertThat(trie.retrieve("/x/y/z", params, TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED), nullValue()); assertThat(trie.retrieve("/x/y/z", params, TrieMatchingMode.WILDCARD_LEAF_NODES_ALLOWED), nullValue()); assertThat(trie.retrieve("/x/y/z", params, TrieMatchingMode.WILDCARD_NODES_ALLOWED), equalTo("test9")); - allPaths = trie.retrieveAll("/x/y/z", () -> params); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo("test9")); - assertFalse(allPaths.hasNext()); + assertThat(trie.retrieveAll("/x/y/z", () -> params).toList(), contains(null, null, null, "test9")); assertThat(trie.retrieve("/d/e/f", params, TrieMatchingMode.EXPLICIT_NODES_ONLY), nullValue()); assertThat(trie.retrieve("/d/e/f", params, TrieMatchingMode.WILDCARD_ROOT_NODES_ALLOWED), nullValue()); assertThat(trie.retrieve("/d/e/f", params, TrieMatchingMode.WILDCARD_LEAF_NODES_ALLOWED), nullValue()); assertThat(trie.retrieve("/d/e/f", params, TrieMatchingMode.WILDCARD_NODES_ALLOWED), equalTo("test10")); - allPaths = trie.retrieveAll("/d/e/f", () -> params); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo(null)); - assertThat(allPaths.next(), equalTo("test10")); - assertFalse(allPaths.hasNext()); + assertThat(trie.retrieveAll("/d/e/f", () -> params).toList(), contains(null, null, null, "test10")); } // https://github.com/elastic/elasticsearch/pull/17916 @@ -208,13 +177,11 @@ public void testSamePathConcreteResolution() { Map params = new HashMap<>(); assertThat(trie.retrieve("/a/b/c", params), equalTo("test1")); - assertThat(params.get("x"), equalTo("a")); - assertThat(params.get("y"), equalTo("b")); - assertThat(params.get("z"), equalTo("c")); + assertThat(params, equalTo(Map.of("x", "a", "y", "b", "z", "c"))); + params.clear(); assertThat(trie.retrieve("/a/_y/c", params), equalTo("test2")); - assertThat(params.get("x"), equalTo("a")); - assertThat(params.get("k"), equalTo("c")); + assertThat(params, equalTo(Map.of("x", "a", "k", "c"))); } public void testNamedWildcardAndLookupWithWildcard() { @@ -227,23 +194,23 @@ public void testNamedWildcardAndLookupWithWildcard() { Map params = new HashMap<>(); assertThat(trie.retrieve("/x/*", params), equalTo("test1")); - assertThat(params.get("test"), equalTo("*")); + assertThat(params, equalTo(Map.of("test", "*"))); - params = new HashMap<>(); + params.clear(); assertThat(trie.retrieve("/b/a", params), equalTo("test2")); - assertThat(params.get("test"), equalTo("b")); + assertThat(params, equalTo(Map.of("test", "b"))); - params = new HashMap<>(); + params.clear(); assertThat(trie.retrieve("/*", params), equalTo("test3")); - assertThat(params.get("test"), equalTo("*")); + assertThat(params, equalTo(Map.of("test", "*"))); - params = new HashMap<>(); + params.clear(); assertThat(trie.retrieve("/*/_endpoint", params), equalTo("test4")); - assertThat(params.get("test"), equalTo("*")); + assertThat(params, equalTo(Map.of("test", "*"))); - params = new HashMap<>(); + params.clear(); assertThat(trie.retrieve("a/*/_endpoint", params), equalTo("test5")); - assertThat(params.get("test"), equalTo("*")); + assertThat(params, equalTo(Map.of("test", "*"))); } // https://github.com/elastic/elasticsearch/issues/14177 @@ -252,14 +219,12 @@ public void testEscapedSlashWithinUrl() { PathTrie pathTrie = new PathTrie<>(RestUtils.REST_DECODER); pathTrie.insert("/{index}/{type}/{id}", "test"); HashMap params = 
+
         assertThat(pathTrie.retrieve("/index/type/a%2Fe", params), equalTo("test"));
-        assertThat(params.get("index"), equalTo("index"));
-        assertThat(params.get("type"), equalTo("type"));
-        assertThat(params.get("id"), equalTo("a/e"));
+        assertThat(params, equalTo(Map.of("index", "index", "type", "type", "id", "a/e")));
 
+        params.clear();
         assertThat(pathTrie.retrieve("//type/id", params), equalTo("test"));
-        assertThat(params.get("index"), equalTo(""));
-        assertThat(params.get("type"), equalTo("type"));
-        assertThat(params.get("id"), equalTo("id"));
+        assertThat(params, equalTo(Map.of("index", "", "type", "type", "id", "id")));
     }
 }

From 5f5947799b94299b9f9f46483a81641be9400206 Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Wed, 17 Apr 2024 11:08:51 -0500
Subject: [PATCH 078/130] Adding a putIfAbsent() method on EnrichCache (#107499)

---
 .../xpack/enrich/EnrichCache.java             |  29 +++++
 .../xpack/enrich/EnrichProcessorFactory.java  |  19 ++-
 .../xpack/enrich/EnrichCacheTests.java        | 118 ++++++++++++++++++
 3 files changed, 156 insertions(+), 10 deletions(-)

diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java
index 749bce59a4bbb..722328b6b76d6 100644
--- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.enrich;
 
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.IndexAbstraction;
@@ -24,6 +25,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.function.BiConsumer;
 
 /**
  * A simple cache for enrich that uses {@link Cache}. There is one instance of this cache and
@@ -52,6 +54,32 @@ public final class EnrichCache {
         this.cache = CacheBuilder.<CacheKey, List<Map<?, ?>>>builder().setMaximumWeight(maxSize).build();
     }
 
+    /**
+     * This method notifies the given listener of the value in this cache for the given searchRequest. If there is no value in the cache
+     * for the searchRequest, then the new cache value is computed using searchResponseFetcher.
+     * @param searchRequest The key for the cache request
+     * @param searchResponseFetcher The function used to compute the value to be put in the cache, if there is no value in the cache already
+     * @param listener A listener to be notified of the value in the cache
+     */
+    public void computeIfAbsent(
+        SearchRequest searchRequest,
+        BiConsumer<SearchRequest, ActionListener<SearchResponse>> searchResponseFetcher,
+        ActionListener<List<Map<?, ?>>> listener
+    ) {
+        // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition.
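+        // Worst case, two racing threads both miss and both invoke the fetcher; the later put() just
+        // overwrites an equal value, which keeps this path simple and lock-free.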
+        List<Map<?, ?>> response = get(searchRequest);
+        if (response != null) {
+            listener.onResponse(response);
+        } else {
+            searchResponseFetcher.accept(searchRequest, ActionListener.wrap(resp -> {
+                List<Map<?, ?>> value = toCacheValue(resp);
+                put(searchRequest, value);
+                listener.onResponse(deepCopy(value, false));
+            }, listener::onFailure));
+        }
+    }
+
+    // non-private for unit testing only
     List<Map<?, ?>> get(SearchRequest searchRequest) {
         String enrichIndex = getEnrichIndexKey(searchRequest);
         CacheKey cacheKey = new CacheKey(enrichIndex, searchRequest);
@@ -64,6 +92,7 @@ public final class EnrichCache {
         }
     }
 
+    // non-private for unit testing only
     void put(SearchRequest searchRequest, List<Map<?, ?>> response) {
         String enrichIndex = getEnrichIndexKey(searchRequest);
         CacheKey cacheKey = new CacheKey(enrichIndex, searchRequest);
diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java
index 907ebb0c9ce3a..9890a96aae820 100644
--- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java
+++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java
@@ -131,16 +131,15 @@ public void accept(ClusterState state) {
         Client originClient = new OriginSettingClient(client, ENRICH_ORIGIN);
         return (req, handler) -> {
             // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition.
-            List<Map<?, ?>> response = enrichCache.get(req);
-            if (response != null) {
-                handler.accept(response, null);
-            } else {
-                originClient.execute(EnrichCoordinatorProxyAction.INSTANCE, req, ActionListener.wrap(resp -> {
-                    List<Map<?, ?>> value = EnrichCache.toCacheValue(resp);
-                    enrichCache.put(req, value);
-                    handler.accept(EnrichCache.deepCopy(value, false), null);
-                }, e -> { handler.accept(null, e); }));
-            }
+            enrichCache.computeIfAbsent(
+                req,
+                (searchRequest, searchResponseActionListener) -> originClient.execute(
+                    EnrichCoordinatorProxyAction.INSTANCE,
+                    searchRequest,
+                    searchResponseActionListener
+                ),
+                ActionListener.wrap(resp -> handler.accept(resp, null), e -> handler.accept(null, e))
+            );
         };
     }
 }
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java
index 735d68f61416e..fe3c3b3e467ef 100644
--- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java
+++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java
@@ -6,21 +6,31 @@
  */
 package org.elasticsearch.xpack.enrich;
 
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.AliasMetadata;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.query.MatchQueryBuilder;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
@@ -138,6 +148,114 @@ public void testCaching() {
         assertThat(cacheStats.getEvictions(), equalTo(4L));
     }
 
+    public void testPutIfAbsent() throws InterruptedException {
+        // Emulate cluster metadata:
+        // (two enrich indices with corresponding alias entries)
+        var metadata = Metadata.builder()
+            .put(
+                IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-1")
+                    .settings(settings(IndexVersion.current()))
+                    .numberOfShards(1)
+                    .numberOfReplicas(0)
+                    .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build())
+            )
+            .put(
+                IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-1")
+                    .settings(settings(IndexVersion.current()))
+                    .numberOfShards(1)
+                    .numberOfReplicas(0)
+                    .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build())
+            )
+            .build();
+
+        // Emulated search requests that an enrich processor could generate:
+        // (two unique searches for two enrich policies)
+        var searchRequest1 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source(
+            new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1"))
+        );
+        final List<Map<String, Object>> searchResponseMap = List.of(
+            Map.of("key1", "value1", "key2", "value2"),
+            Map.of("key3", "value3", "key4", "value4")
+        );
+        EnrichCache enrichCache = new EnrichCache(3);
+        enrichCache.setMetadata(metadata);
+
+        {
+            CountDownLatch queriedDatabaseLatch = new CountDownLatch(1);
+            CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
+            enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> {
+                SearchResponse searchResponse = convertToSearchResponse(searchResponseMap);
+                searchResponseActionListener.onResponse(searchResponse);
+                searchResponse.decRef();
+                queriedDatabaseLatch.countDown();
+            }, new ActionListener<>() {
+                @Override
+                public void onResponse(List<Map<?, ?>> response) {
+                    assertThat(response, equalTo(searchResponseMap));
+                    notifiedOfResultLatch.countDown();
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    fail(e);
+                }
+            });
+            assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true));
+            assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
+        }
+
+        {
+            CountDownLatch notifiedOfResultLatch = new CountDownLatch(1);
+            enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> {
+                fail("Expected no call to the database because item should have been in the cache");
+            }, new ActionListener<>() {
+                @Override
+                public void onResponse(List<Map<?, ?>> maps) {
+                    notifiedOfResultLatch.countDown();
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    fail(e);
+                }
+            });
+            assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true));
+        }
+    }
+
+    private SearchResponse convertToSearchResponse(List<Map<String, Object>> searchResponseList) {
+        SearchHit[] hitArray = searchResponseList.stream().map(map -> {
+            try {
+                return SearchHit.unpooled(0, "id").sourceRef(convertMapToJson(map));
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }).toArray(SearchHit[]::new);
+        SearchHits hits = SearchHits.unpooled(hitArray, null, 0);
+        return new SearchResponse(
+            hits,
+            null,
+            null,
+            false,
+            false,
+            null,
+            1,
+            null,
+            5,
+            4,
+            0,
+            randomLong(),
+            null,
+            SearchResponse.Clusters.EMPTY
+        );
+    }
+
+    private BytesReference convertMapToJson(Map<String, Object> simpleMap) throws IOException {
+        try (XContentBuilder builder = JsonXContent.contentBuilder().map(simpleMap)) {
+            return BytesReference.bytes(builder);
+        }
+    }
+
     public void testDeepCopy() {
         Map<String, Object> original = new HashMap<>();
         {

From f28529d237f1fc75214e02ebdbecad57797ee120 Mon Sep 17 00:00:00 2001
From: Oleksandr Kolomiiets
Date: Wed, 17 Apr 2024 09:33:21 -0700
Subject: [PATCH 079/130] Handle infinity during synthetic source construction for scaled float field (#107494)

For really large values, rounding error is enough to push the reconstructed
value for synthetic source into infinity. Existing code didn't take this into
account. This PR adds a check that detects the overflow and falls back to the
closest finite value, Double.MAX_VALUE with the matching sign, in synthetic
source.

Closes #107101.
---
 docs/changelog/107494.yaml                    |  6 ++++
 .../mapper/extras/ScaledFloatFieldMapper.java | 14 ++++++++
 .../extras/ScaledFloatFieldMapperTests.java   | 35 ++++++++++++++++++-
 3 files changed, 54 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/107494.yaml

diff --git a/docs/changelog/107494.yaml b/docs/changelog/107494.yaml
new file mode 100644
index 0000000000000..1d71ce284a4a8
--- /dev/null
+++ b/docs/changelog/107494.yaml
@@ -0,0 +1,6 @@
+pr: 107494
+summary: Handle infinity during synthetic source construction for scaled float field
+area: Mapping
+type: bug
+issues:
+ - 107101
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java
index 09507ae926f44..cb17503579e32 100644
--- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java
@@ -749,6 +749,20 @@ protected void writeValue(XContentBuilder b, long value) throws IOException {
      */
     static double decodeForSyntheticSource(long scaledValue, double scalingFactor) {
         double v = scaledValue / scalingFactor;
+
+        // If original double value is close to MAX_VALUE
+        // and rounding is performed in the direction of the same infinity
+        // it is possible to "overshoot" infinity during reconstruction.
+        // E.g. for a value close to Double.MAX_VALUE "true" scaled value is 10.5
+        // and with rounding it becomes 11.
+        // Now, because of that rounding difference, 11 divided by scaling factor goes into infinity.
+        // There is nothing we can do about it so we'll return the closest finite value to infinity
+        // which is MAX_VALUE.
+        if (Double.isInfinite(v)) {
+            var sign = v == Double.POSITIVE_INFINITY ? 1 : -1;
+            return sign * Double.MAX_VALUE;
+        }
+
         long reenc = Math.round(v * scalingFactor);
         if (reenc != scaledValue) {
             if (reenc > scaledValue) {
diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
index d6eb55dfb23e4..253df4de999db 100644
--- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java
@@ -395,6 +395,12 @@ private Tuple<Double, Double> generateValue() {
     private double round(double d) {
         long encoded = Math.round(d * scalingFactor);
         double decoded = encoded / scalingFactor;
+        // Special case due to rounding, see implementation.
+        if (Double.isInfinite(decoded)) {
+            var sign = decoded == Double.POSITIVE_INFINITY ? 1 : -1;
+            return sign * Double.MAX_VALUE;
+        }
+
         long reencoded = Math.round(decoded * scalingFactor);
         if (encoded != reencoded) {
             if (encoded > reencoded) {
@@ -406,6 +412,11 @@ private double round(double d) {
     }
 
     private double roundDocValues(double d) {
+        // Special case due to rounding, see implementation.
+        if (Math.abs(d) == Double.MAX_VALUE) {
+            return d;
+        }
+
         long encoded = Math.round(d * scalingFactor);
         return encoded * (1 / scalingFactor);
     }
@@ -526,7 +537,7 @@ public void testEncodeDecodeSaturatedLow() {
     }
 
     /**
-     * Tests that numbers whose encoded value is {@code Long.MIN_VALUE} can be round
+     * Tests that numbers whose encoded value is {@code Long.MAX_VALUE} can be round
      * tripped through synthetic source.
     */
    public void testEncodeDecodeSaturatedHigh() {
@@ -580,6 +591,28 @@ public void testDecodeEncode() {
         );
     }
 
+    /**
+     * Tests the case when decoded value is infinite due to rounding.
+     */
+    public void testDecodeHandlingInfinity() {
+        for (var sign : new long[] { 1, -1 }) {
+            long encoded = 101;
+            double encodedNoRounding = 100.5;
+            assertEquals(encoded, Math.round(encodedNoRounding));
+
+            var signedMax = sign * Double.MAX_VALUE;
+            // We need a scaling factor that will
+            // 1. make encoded long small resulting in significant loss of precision due to rounding
+            // 2. result in long value being rounded in correct direction.
+            //
+            // So we take a scaling factor that would put us right at MAX_VALUE
+            // without rounding and hence go beyond MAX_VALUE with rounding.
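+            //
+            // Concretely, for sign = 1: scalingFactor = 100.5 / Double.MAX_VALUE, so decoding computes
+            // 101 / scalingFactor = Double.MAX_VALUE * (101 / 100.5), which overflows to infinity before
+            // decodeForSyntheticSource clamps it back to signedMax.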
+ double scalingFactor = (encodedNoRounding / signedMax); + + assertThat(ScaledFloatFieldMapper.decodeForSyntheticSource(encoded, scalingFactor), equalTo(signedMax)); + } + } + private double encodeDecode(double value, double scalingFactor) { return ScaledFloatFieldMapper.decodeForSyntheticSource(ScaledFloatFieldMapper.encode(value, scalingFactor), scalingFactor); } From cc753389c1da13a44c064718989dfcc70d92ed6c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 17 Apr 2024 17:04:59 +0000 Subject: [PATCH 080/130] Bump to version 8.15.0 --- .backportrc.json | 4 +-- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++++ .buildkite/pipelines/periodic.yml | 14 ++++++++-- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 1 + build-tools-internal/version.properties | 2 +- docs/reference/migration/index.asciidoc | 2 ++ .../reference/migration/migrate_8_15.asciidoc | 20 ++++++++++++++ docs/reference/release-notes.asciidoc | 2 ++ docs/reference/release-notes/8.15.0.asciidoc | 8 ++++++ .../release-notes/highlights.asciidoc | 26 ++++++------------- .../main/java/org/elasticsearch/Version.java | 3 ++- 13 files changed, 76 insertions(+), 25 deletions(-) create mode 100644 docs/reference/migration/migrate_8_15.asciidoc create mode 100644 docs/reference/release-notes/8.15.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index cb8aa183f7bf9..59843f4d5f134 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,9 +1,9 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { - "^v8.14.0$" : "main", + "^v8.15.0$" : "main", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } \ No newline at end of file diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 18a93c9b63a3e..8103b40cbaff0 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index c306e1d9f63cb..347b7ddde752e 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -561,6 +561,22 @@ steps: env: BWC_VERSION: 8.14.0 + - label: "{{matrix.image}} / 8.15.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.15.0 + - group: packaging-tests-windows steps: - label: "{{matrix.image}} / packaging-tests-windows" diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3410436eda2bf..9291ec2efcbd9 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -332,6 +332,16 @@ steps: buildDirectory: /dev/shm/bk 
env: BWC_VERSION: 8.14.0 + - label: 8.15.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.15.0 - label: concurrent-search-tests command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 @@ -396,7 +406,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -438,7 +448,7 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0"] + BWC_VERSION: ["7.17.21", "8.13.3", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 46165da472e74..32a5ef8f8d1e5 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -32,3 +32,4 @@ BWC_VERSION: - "8.12.2" - "8.13.3" - "8.14.0" + - "8.15.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index dfd238a041b1e..6ee9691a9e5ee 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -2,3 +2,4 @@ BWC_VERSION: - "7.17.21" - "8.13.3" - "8.14.0" + - "8.15.0" diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0883097e75aad..d3d528cbff494 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,4 +1,4 @@ -elasticsearch = 8.14.0 +elasticsearch = 8.15.0 lucene = 9.10.0 bundled_jdk_vendor = openjdk diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index c524380547839..51a2898b5d598 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -1,5 +1,6 @@ include::migration_intro.asciidoc[] +* <> * <> * <> * <> @@ -16,6 +17,7 @@ include::migration_intro.asciidoc[] * <> * <> +include::migrate_8_15.asciidoc[] include::migrate_8_14.asciidoc[] include::migrate_8_13.asciidoc[] include::migrate_8_12.asciidoc[] diff --git a/docs/reference/migration/migrate_8_15.asciidoc b/docs/reference/migration/migrate_8_15.asciidoc new file mode 100644 index 0000000000000..a183e68a50693 --- /dev/null +++ b/docs/reference/migration/migrate_8_15.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.15]] +== Migrating to 8.15 +++++ +8.15 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.15. + +See also <> and <>. + +coming::[8.15.0] + + +[discrete] +[[breaking-changes-8.15]] +=== Breaking changes + +There are no breaking changes in {es} 8.15. + diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 05c97d51a38e7..3cef5cc88bbb7 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,7 @@ This section summarizes the changes in each release. +* <> * <> * <> * <> @@ -64,6 +65,7 @@ This section summarizes the changes in each release. 
-- +include::release-notes/8.15.0.asciidoc[] include::release-notes/8.14.0.asciidoc[] include::release-notes/8.13.2.asciidoc[] include::release-notes/8.13.1.asciidoc[] diff --git a/docs/reference/release-notes/8.15.0.asciidoc b/docs/reference/release-notes/8.15.0.asciidoc new file mode 100644 index 0000000000000..97f4a51a1142f --- /dev/null +++ b/docs/reference/release-notes/8.15.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-8.15.0]] +== {es} version 8.15.0 + +coming[8.15.0] + +Also see <>. + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 8d9d743a239f5..8c1590d17288f 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -11,7 +11,8 @@ For detailed information about this release, see the <> and // Add previous release to the list Other versions: -{ref-bare}/8.13/release-highlights.html[8.13] +{ref-bare}/8.14/release-highlights.html[8.14] +| {ref-bare}/8.13/release-highlights.html[8.13] | {ref-bare}/8.12/release-highlights.html[8.12] | {ref-bare}/8.11/release-highlights.html[8.11] | {ref-bare}/8.10/release-highlights.html[8.10] @@ -28,24 +29,13 @@ Other versions: endif::[] +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: // tag::notable-highlights[] - -[discrete] -[[add_global_retention_in_data_stream_lifecycle]] -=== Add global retention in data stream lifecycle -Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention -allows us to configure two different retentions: - -- `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention -defined on the data stream level. -- `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream -data to be deleted after the `max_retention` has passed. - -Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data -stream considering all the available retention configurations. - -{es-pull}105682[#105682] - +// [discrete] +// === Heading +// +// Description. 
 // end::notable-highlights[]
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 88a1049a42557..ab7b26570a665 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -173,7 +173,8 @@ public class Version implements VersionId<Version>, ToXContentFragment {
     public static final Version V_8_13_2 = new Version(8_13_02_99);
     public static final Version V_8_13_3 = new Version(8_13_03_99);
     public static final Version V_8_14_0 = new Version(8_14_00_99);
-    public static final Version CURRENT = V_8_14_0;
+    public static final Version V_8_15_0 = new Version(8_15_00_99);
+    public static final Version CURRENT = V_8_15_0;
 
     private static final NavigableMap<Integer, Version> VERSION_IDS;
     private static final Map<String, Version> VERSION_STRINGS;

From 1aa182a2733359c1feafc82fdc78b00bc1d24a12 Mon Sep 17 00:00:00 2001
From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com>
Date: Wed, 17 Apr 2024 13:28:00 -0400
Subject: [PATCH 081/130] [ML] Inference API time to reserve tokens for rate limiter (#107571)

* Refactoring tests

* Adding time to reserve tests
---
 .../xpack/inference/common/RateLimiter.java   |  58 +++++--
 ...erTests.java => BaseRateLimiterTests.java} | 100 ++++++------
 .../common/RateLimiterAcquireTests.java       |  32 ++++
 .../common/RateLimiterReserveTests.java       |  30 ++++
 .../common/RateLimiterTimeToReserveTests.java | 142 ++++++++++++++++++
 5 files changed, 307 insertions(+), 55 deletions(-)
 rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/{RateLimiterTests.java => BaseRateLimiterTests.java} (67%)
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterAcquireTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterReserveTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTimeToReserveTests.java

diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
index ac28aa87f554b..bbc5082d45004 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.inference.common;
 
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.core.TimeValue;
 
 import java.time.Clock;
 import java.time.Instant;
@@ -92,24 +93,59 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double
      * @throws InterruptedException _
     */
    public void acquire(int tokens) throws InterruptedException {
+        sleeper.sleep(reserveInternal(tokens));
+    }
+
+    /**
+     * Returns the amount of time to wait for the tokens to become available but does not reserve them in advance.
+     * A caller will need to call {@link #reserve(int)} or {@link #acquire(int)} after this call.
+     * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here. Must be greater than 0.
+ * @return the amount of time to wait + */ + public TimeValue timeToReserve(int tokens) { + var timeToReserveRes = timeToReserveInternal(tokens); + + return new TimeValue((long) timeToReserveRes.microsToWait, TimeUnit.MICROSECONDS); + } + + private TimeToReserve timeToReserveInternal(int tokens) { + validateTokenRequest(tokens); + + double microsToWait; + accumulateTokens(); + var accumulatedTokensToUse = Math.min(tokens, accumulatedTokens); + var additionalTokensRequired = tokens - accumulatedTokensToUse; + microsToWait = additionalTokensRequired / tokensPerMicros; + + return new TimeToReserve(microsToWait, accumulatedTokensToUse); + } + + private record TimeToReserve(double microsToWait, double accumulatedTokensToUse) {} + + private static void validateTokenRequest(int tokens) { if (tokens <= 0) { throw new IllegalArgumentException("Requested tokens must be positive"); } + } - double microsToWait; - synchronized (this) { - accumulateTokens(); - var accumulatedTokensToUse = Math.min(tokens, accumulatedTokens); - var additionalTokensRequired = tokens - accumulatedTokensToUse; - microsToWait = additionalTokensRequired / tokensPerMicros; - accumulatedTokens -= accumulatedTokensToUse; - nextTokenAvailability = nextTokenAvailability.plus((long) microsToWait, ChronoUnit.MICROS); - } + /** + * Returns the amount of time to wait for the tokens to become available. + * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here. Must be greater than 0. + * @return the amount of time to wait + */ + public TimeValue reserve(int tokens) { + return new TimeValue(reserveInternal(tokens), TimeUnit.MICROSECONDS); + } + + private synchronized long reserveInternal(int tokens) { + var timeToReserveRes = timeToReserveInternal(tokens); + accumulatedTokens -= timeToReserveRes.accumulatedTokensToUse; + nextTokenAvailability = nextTokenAvailability.plus((long) timeToReserveRes.microsToWait, ChronoUnit.MICROS); - sleeper.sleep((long) microsToWait); + return (long) timeToReserveRes.microsToWait; } - private void accumulateTokens() { + private synchronized void accumulateTokens() { var now = Instant.now(clock); if (now.isAfter(nextTokenAvailability)) { var elapsedTimeMicros = microsBetweenExact(nextTokenAvailability, now); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/BaseRateLimiterTests.java similarity index 67% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/BaseRateLimiterTests.java index 46931f12aaf4f..d012f135839c6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/BaseRateLimiterTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.common; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import java.time.Clock; @@ -17,11 +18,19 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -public class RateLimiterTests extends ESTestCase { +public abstract class BaseRateLimiterTests 
extends ESTestCase { + + protected abstract TimeValue tokenMethod(RateLimiter limiter, int tokens) throws InterruptedException; + + protected abstract void sleepValidationMethod( + TimeValue result, + RateLimiter.Sleeper mockSleeper, + int numberOfClassToExpect, + long expectedMicrosecondsToSleep + ) throws InterruptedException; + public void testThrows_WhenAccumulatedTokensLimit_IsNegative() { var exception = expectThrows( IllegalArgumentException.class, @@ -65,19 +74,19 @@ public void testThrows_WhenTokensPerTimeUnit_IsNegative() { assertThat(exception.getMessage(), is("Tokens per time unit must be greater than 0")); } - public void testAcquire_Throws_WhenTokens_IsZero() { + public void testMethod_Throws_WhenTokens_IsZero() { var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(0)); assertThat(exception.getMessage(), is("Requested tokens must be positive")); } - public void testAcquire_Throws_WhenTokens_IsNegative() { + public void testMethod_Throws_WhenTokens_IsNegative() { var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(-1)); assertThat(exception.getMessage(), is("Requested tokens must be positive")); } - public void testAcquire_First_CallDoesNotSleep() throws InterruptedException { + public void testMethod_First_CallDoesNotSleep() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -85,11 +94,11 @@ public void testAcquire_First_CallDoesNotSleep() throws InterruptedException { var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(0); + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, 0); } - public void testAcquire_DoesNotSleep_WhenTokenRateIsHigh() throws InterruptedException { + public void testMethod_DoesNotSleep_WhenTokenRateIsHigh() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -97,11 +106,11 @@ public void testAcquire_DoesNotSleep_WhenTokenRateIsHigh() throws InterruptedExc var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(0); + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, 0); } - public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsHigh() throws InterruptedException { + public void testMethod_AcceptsMaxIntValue_WhenTokenRateIsHigh() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -109,11 +118,11 @@ public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsHigh() throws Interrup var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); - limiter.acquire(Integer.MAX_VALUE); - verify(sleeper, times(1)).sleep(0); + var res = tokenMethod(limiter, Integer.MAX_VALUE); + sleepValidationMethod(res, sleeper, 1, 0); } - public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsLow() throws InterruptedException { + public void 
testMethod_AcceptsMaxIntValue_WhenTokenRateIsLow() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -122,13 +131,13 @@ public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsLow() throws Interrupt double tokensPerDay = 1; var limiter = new RateLimiter(0, tokensPerDay, TimeUnit.DAYS, sleeper, clock); - limiter.acquire(Integer.MAX_VALUE); + var res = tokenMethod(limiter, Integer.MAX_VALUE); double tokensPerMicro = tokensPerDay / TimeUnit.DAYS.toMicros(1); - verify(sleeper, times(1)).sleep((long) ((double) Integer.MAX_VALUE / tokensPerMicro)); + sleepValidationMethod(res, sleeper, 1, (long) ((double) Integer.MAX_VALUE / tokensPerMicro)); } - public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken() throws InterruptedException { + public void testMethod_SleepsForOneMinute_WhenRequestingOneUnavailableToken() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -136,11 +145,11 @@ public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken() t var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(2); - verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + var res = tokenMethod(limiter, 2); + sleepValidationMethod(res, sleeper, 1, TimeUnit.MINUTES.toMicros(1)); } - public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken_NoAccumulated() throws InterruptedException { + public void testMethod_SleepsForOneMinute_WhenRequestingOneUnavailableToken_NoAccumulated() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -148,11 +157,11 @@ public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken_NoA var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, TimeUnit.MINUTES.toMicros(1)); } - public void testAcquire_SleepsFor10Minute_WhenRequesting10UnavailableToken_NoAccumulated() throws InterruptedException { + public void testMethod_SleepsFor10Minute_WhenRequesting10UnavailableToken_NoAccumulated() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -160,11 +169,11 @@ public void testAcquire_SleepsFor10Minute_WhenRequesting10UnavailableToken_NoAcc var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(10); - verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(10)); + var res = tokenMethod(limiter, 10); + sleepValidationMethod(res, sleeper, 1, TimeUnit.MINUTES.toMicros(10)); } - public void testAcquire_IncrementsNextTokenAvailabilityInstant_ByOneMinute() throws InterruptedException { + public void testMethod_IncrementsNextTokenAvailabilityInstant_ByOneMinute() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -172,12 +181,12 @@ public void testAcquire_IncrementsNextTokenAvailabilityInstant_ByOneMinute() thr var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(0, 
1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, TimeUnit.MINUTES.toMicros(1)); assertThat(limiter.getNextTokenAvailability(), is(now.plus(1, ChronoUnit.MINUTES))); } - public void testAcquire_SecondCallToAcquire_ShouldWait_WhenAccumulatedTokensAreDepleted() throws InterruptedException { + public void testMethod_SecondCallToAcquire_ShouldWait_WhenAccumulatedTokensAreDepleted() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -185,13 +194,14 @@ public void testAcquire_SecondCallToAcquire_ShouldWait_WhenAccumulatedTokensAreD var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(0); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, 0); + res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, TimeUnit.MINUTES.toMicros(1)); } - public void testAcquire_SecondCallToAcquire_ShouldWaitForHalfDuration_WhenElapsedTimeIsHalfRequiredDuration() + public void testMethod_SecondCallToAcquire_ShouldWaitForHalfDuration_WhenElapsedTimeIsHalfRequiredDuration() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); @@ -200,14 +210,15 @@ public void testAcquire_SecondCallToAcquire_ShouldWaitForHalfDuration_WhenElapse var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(0); + + var res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, 0); when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); - limiter.acquire(1); - verify(sleeper, times(1)).sleep(TimeUnit.SECONDS.toMicros(30)); + res = tokenMethod(limiter, 1); + sleepValidationMethod(res, sleeper, 1, TimeUnit.SECONDS.toMicros(30)); } - public void testAcquire_ShouldAccumulateTokens() throws InterruptedException { + public void testMethod_ShouldAccumulateTokens() throws InterruptedException { var now = Clock.systemUTC().instant(); var clock = mock(Clock.class); when(clock.instant()).thenReturn(now); @@ -215,11 +226,12 @@ public void testAcquire_ShouldAccumulateTokens() throws InterruptedException { var sleeper = mock(RateLimiter.Sleeper.class); var limiter = new RateLimiter(10, 10, TimeUnit.MINUTES, sleeper, clock); - limiter.acquire(5); - verify(sleeper, times(1)).sleep(0); + + var res = tokenMethod(limiter, 5); + sleepValidationMethod(res, sleeper, 1, 0); // it should accumulate 5 tokens when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); - limiter.acquire(10); - verify(sleeper, times(2)).sleep(0); + res = tokenMethod(limiter, 10); + sleepValidationMethod(res, sleeper, 2, 0); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterAcquireTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterAcquireTests.java new file mode 100644 index 0000000000000..1f59fa7bb5bad --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterAcquireTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.core.TimeValue; + +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class RateLimiterAcquireTests extends BaseRateLimiterTests { + + @Override + protected TimeValue tokenMethod(RateLimiter limiter, int tokens) throws InterruptedException { + limiter.acquire(tokens); + return null; + } + + @Override + protected void sleepValidationMethod( + TimeValue result, + RateLimiter.Sleeper mockSleeper, + int numberOfClassToExpect, + long expectedMicrosecondsToSleep + ) throws InterruptedException { + verify(mockSleeper, times(numberOfClassToExpect)).sleep(expectedMicrosecondsToSleep); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterReserveTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterReserveTests.java new file mode 100644 index 0000000000000..5c32c6c560e7b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterReserveTests.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.core.TimeValue; + +import static org.hamcrest.Matchers.is; + +public class RateLimiterReserveTests extends BaseRateLimiterTests { + + @Override + protected TimeValue tokenMethod(RateLimiter limiter, int tokens) { + return limiter.reserve(tokens); + } + + @Override + protected void sleepValidationMethod( + TimeValue result, + RateLimiter.Sleeper mockSleeper, + int numberOfClassToExpect, + long expectedMicrosecondsToSleep + ) { + assertThat(result.getMicros(), is(expectedMicrosecondsToSleep)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTimeToReserveTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTimeToReserveTests.java new file mode 100644 index 0000000000000..a69846d67c4e3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTimeToReserveTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.time.Clock; +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RateLimiterTimeToReserveTests extends ESTestCase { + public void testTimeToReserve_Returns_1Second() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + } + + public void testTimeToReserve_Returns_1Second_WithoutReservingToken() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + + timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + } + + public void testTimeToReserve_Returns_0Seconds_WhenTokenIsAlreadyAvailable() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + } + + public void testTimeToReserve_Returns_0Seconds_WhenTokenIsAlreadyAvailable_WithoutReservingToken() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + + timeToWait = limiter.timeToReserve(1); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + } + + public void testTimeToReserve_Returns_1Seconds_When1TokenIsAlreadyAvailable_ButRequires2Tokens() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + } + + public void testTimeToReserve_Returns_1Seconds_When1TokenIsAlreadyAvailable_ButRequires2Tokens_WithoutReservingToken() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.SECONDS, sleeper, clock); + var timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + + timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(1))); + } + + public void testTimeToReserve_Returns_0Seconds_WhenTimeAdvancesToAccumulate2Tokens() { + var now = Clock.systemUTC().instant(); + var clock = 
mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(2, 1, TimeUnit.SECONDS, sleeper, clock); + // drain the accumulated tokens + var drainedTokensTime = limiter.reserve(2); + assertThat(drainedTokensTime, is(TimeValue.timeValueSeconds(0))); + + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(2))); + // 2 tokens should now be available + var timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + } + + public void testTimeToReserve_Returns_0Seconds_WhenTimeAdvancesToAccumulate2Tokens_MethodCallDoesNotReserveTokens() { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(2, 1, TimeUnit.SECONDS, sleeper, clock); + // drain the accumulated tokens + var drainedTokensTime = limiter.reserve(2); + assertThat(drainedTokensTime, is(TimeValue.timeValueSeconds(0))); + + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(2))); + // 2 tokens should now be available + var timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + + // 2 tokens should still be available + timeToWait = limiter.timeToReserve(2); + assertThat(timeToWait, is(TimeValue.timeValueSeconds(0))); + } +} From 53d012bcb931bb6d5f2a0b558157a28550f4799d Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 17 Apr 2024 13:29:53 -0400 Subject: [PATCH 082/130] Adding poll peek (#107576) --- .../AdjustableCapacityBlockingQueue.java | 34 +++++++++++ .../AdjustableCapacityBlockingQueueTests.java | 59 +++++++++++++++++++ 2 files changed, 93 insertions(+) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java index e73151b44a3e4..ea600488ea8f6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java @@ -145,6 +145,40 @@ public E take() throws InterruptedException { } } + public E peek() { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lock(); + try { + var oldItem = prioritizedReadingQueue.peek(); + + if (oldItem != null) { + return oldItem; + } + + return currentQueue.peek(); + } finally { + readLock.unlock(); + } + } + + public E poll() { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lock(); + try { + var oldItem = prioritizedReadingQueue.poll(); + + if (oldItem != null) { + return oldItem; + } + + return currentQueue.poll(); + } finally { + readLock.unlock(); + } + } + /** * Returns the number of elements stored in the queue. If the capacity was recently changed, the value returned could be * greater than the capacity. 
This occurs when the capacity was reduced and there were more elements in the queue than the diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java index 09cd065ce3cd0..5a70b98313f7c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java @@ -233,6 +233,65 @@ public void testTake_RemovesItemFromQueue() throws InterruptedException { assertThat(queue.size(), is(0)); } + public void testPeek_ReturnsItemWithoutRemoving() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertThat(queue.size(), is(0)); + + queue.offer(0); + assertThat(queue.size(), is(1)); + assertThat(queue.peek(), is(0)); + assertThat(queue.size(), is(1)); + assertThat(queue.peek(), is(0)); + } + + public void testPeek_ExistingItem_RemainsAtFront_AfterCapacityChange() throws InterruptedException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + queue.offer(0); + assertThat(queue.size(), is(1)); + assertThat(queue.remainingCapacity(), is(0)); + assertThat(queue.peek(), is(0)); + + queue.setCapacity(2); + assertThat(queue.remainingCapacity(), is(1)); + assertThat(queue.peek(), is(0)); + + queue.offer(1); + assertThat(queue.peek(), is(0)); + assertThat(queue.take(), is(0)); + assertThat(queue.peek(), is(1)); + } + + public void testPoll_ReturnsNull_WhenNoItemsAreAvailable() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertNull(queue.poll()); + } + + public void testPoll_ReturnsFirstElement() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + queue.offer(0); + assertThat(queue.poll(), is(0)); + assertThat(queue.size(), is(0)); + assertThat(queue.remainingCapacity(), is(1)); + } + + public void testPoll_ReturnsFirstElement_AfterCapacityIncrease() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + queue.offer(0); + queue.setCapacity(2); + queue.offer(1); + + assertThat(queue.remainingCapacity(), is(0)); + assertThat(queue.size(), is(2)); + + assertThat(queue.poll(), is(0)); + assertThat(queue.size(), is(1)); + assertThat(queue.remainingCapacity(), is(1)); + + assertThat(queue.poll(), is(1)); + assertThat(queue.size(), is(0)); + assertThat(queue.remainingCapacity(), is(2)); + } + public static AdjustableCapacityBlockingQueue.QueueCreator mockQueueCreator(BlockingQueue backingQueue) { return new AdjustableCapacityBlockingQueue.QueueCreator<>() { @Override From 223e7f829bdc1b262344d98ed9deb565086475e3 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 17 Apr 2024 19:37:04 +0200 Subject: [PATCH 083/130] Avoid attempting to load the same empty field twice in fetch phase (#107551) During the fetch phase, there's a number of stored fields that are requested explicitly or loaded by default. That information is included in `StoredFieldsSpec` that each fetch sub phase exposes. We attempt to provide stored fields that are already loaded to the fields lookup that scripts as well as value fetchers use to load field values (via `SearchLookup`). 
This is done in `PreloadedFieldLookupProvider`. The current logic makes values available for fields that have been found, so that scripts or value fetchers that request them don't load them again ad-hoc. Stored fields that don't have a value for a specific doc, however, are treated like any other field that was not requested, and are loaded again even though they will not be found, which causes overhead. This change makes the list of required stored fields available to `PreloadedFieldLookupProvider`, so that it can better distinguish between fields that we already attempted to load (although we may not have found a value for them) and those that need to be loaded ad-hoc (for instance because a script is requesting them for the first time). This is an existing issue that has become evident as we moved fetching of metadata fields to `FetchFieldsPhase`, which relies on value fetchers, and hence on `SearchLookup`. We end up attempting to load the default metadata fields (`_ignored` and `_routing`) twice when they are not present in a document, which makes us call `LeafReader#storedFields` additional times for the same document, providing a `SingleFieldVisitor` that will never find a value. Another existing issue that this PR fixes is that `FetchFieldsPhase` now extends the `StoredFieldsSpec` that it exposes to include the metadata fields that the phase is responsible for loading. That results in `_ignored` being included in the output of the debug stored fields section when profiling is enabled. The fact that it was previously missing is an existing bug (it was missing in `StoredFieldLoader#fieldsToLoad`). Yet another existing issue that this PR fixes is that `_id` has until now always been loaded on demand when requested via fetch fields or a script. That is because it is not part of the preloaded stored fields that the fetch phase passes over to the `PreloadedFieldLookupProvider`. That causes overhead, as the field has already been loaded and should not be loaded again when explicitly requested.
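
To make the distinction concrete, here is a minimal sketch of the idea. It is illustrative only: the class and method names are invented for this example and simplified from the actual `PreloadedFieldLookupProvider`. Fields whose names are in the preloaded set were already attempted during the stored-fields pass and must not be re-read, even when no value was found for the document; only fields outside that set fall back to an ad-hoc load.

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

class PreloadedLookupSketch {
    private final Set<String> preloadedNames;       // every stored field the fetch phase attempted to load
    private final Map<String, List<Object>> values; // only the fields that actually had values

    PreloadedLookupSketch(Set<String> preloadedNames, Map<String, List<Object>> values) {
        this.preloadedNames = preloadedNames;
        this.values = values;
    }

    List<Object> lookup(String field, Supplier<List<Object>> adHocLoader) {
        if (preloadedNames.contains(field)) {
            // Already attempted during the stored-fields pass: return the preloaded
            // values, or an empty list if the document simply has none. No second read.
            return values.getOrDefault(field, List.of());
        }
        // Never requested before (e.g. first use by a script): load on demand.
        return adHocLoader.get();
    }
}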
--- docs/changelog/107551.yaml | 5 ++ docs/reference/search/profile.asciidoc | 4 +- .../rest-api-spec/test/30_inner_hits.yml | 2 +- .../rest-api-spec/test/search/370_profile.yml | 6 +- .../search/source/MetadataFetchingIT.java | 14 +++++ .../search/fetch/FetchPhase.java | 8 ++- .../fetch/PreloadedFieldLookupProvider.java | 39 +++++++++++-- .../fetch/subphase/FetchFieldsPhase.java | 5 +- .../search/lookup/FieldLookup.java | 4 +- .../PreloadedFieldLookupProviderTests.java | 16 +++++- .../fetch/subphase/FetchFieldsPhaseTests.java | 57 +++++++++++++++++++ 11 files changed, 141 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/107551.yaml diff --git a/docs/changelog/107551.yaml b/docs/changelog/107551.yaml new file mode 100644 index 0000000000000..78e64cc526638 --- /dev/null +++ b/docs/changelog/107551.yaml @@ -0,0 +1,5 @@ +pr: 107551 +summary: Avoid attempting to load the same empty field twice in fetch phase +area: Search +type: bug +issues: [] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808c..48c65ed0abc7b 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -194,7 +194,7 @@ The API returns the following result: "load_source_count": 5 }, "debug": { - "stored_fields": ["_id", "_routing", "_source"] + "stored_fields": ["_id", "_ignored", "_routing", "_source"] }, "children": [ { @@ -1051,7 +1051,7 @@ And here is the fetch profile: "load_source_count": 5 }, "debug": { - "stored_fields": ["_id", "_routing", "_source"] + "stored_fields": ["_id", "_ignored", "_routing", "_source"] }, "children": [ { diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index a561ebbae00e9..eff9a9beb35bc 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -140,7 +140,7 @@ profile fetch: - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _ignored, _routing, _source] } - length: { profile.shards.0.fetch.children: 4 } - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 200f7292291b1..dda3d14a5ae1d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -41,7 +41,7 @@ fetch fields: - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _ignored, _routing, _source] } - length: { profile.shards.0.fetch.children: 2 } - match: { profile.shards.0.fetch.children.0.type: 
FetchFieldsPhase } - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } @@ -74,7 +74,7 @@ fetch source: - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _ignored, _routing, _source] } - length: { profile.shards.0.fetch.children: 3 } - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } - match: { profile.shards.0.fetch.children.1.type: FetchSourcePhase } @@ -139,7 +139,7 @@ fetch nested source: - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } - - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _ignored, _routing, _source] } - length: { profile.shards.0.fetch.children: 4 } - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } - match: { profile.shards.0.fetch.children.1.type: FetchSourcePhase } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index b8d1d45a6f85d..9e0dd984c9a2a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -168,4 +168,18 @@ public void testInvalid() { assertThat(exc.getMessage(), equalTo("cannot combine _none_ with other fields")); } } + + public void testFetchId() { + assertAcked(prepareCreate("test")); + ensureGreen(); + + prepareIndex("test").setId("1").setSource("field", "value").get(); + refresh(); + + assertResponse(prepareSearch("test").addFetchField("_id"), response -> { + assertEquals(1, response.getHits().getHits().length); + assertEquals("1", response.getHits().getAt(0).getId()); + assertEquals("1", response.getHits().getAt(0).field("_id").getValue()); + }); + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 2fa3e903a0074..4b5c647da0c9a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -112,9 +112,13 @@ private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Pr context.getSearchExecutionContext().setLookupProviders(sourceProvider, ctx -> fieldLookupProvider); List processors = getProcessors(context.shardTarget(), fetchContext, profiler); - StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.build(processors, FetchSubPhaseProcessor::storedFieldsSpec); storedFieldsSpec = storedFieldsSpec.merge(new StoredFieldsSpec(false, false, sourceLoader.requiredStoredFields())); + // Ideally the required stored fields would be provided as constructor argument a few lines above, but that requires moving + // the getProcessors call to before the setLookupProviders call, which causes weird issues in InnerHitsPhase. 
+ // setLookupProviders resets the SearchLookup used throughout the rest of the fetch phase, which StoredValueFetchers rely on + // to retrieve stored fields, and InnerHitsPhase is the last sub-fetch phase and re-runs the entire fetch phase. + fieldLookupProvider.setPreloadedStoredFieldNames(storedFieldsSpec.requiredStoredFields()); StoredFieldLoader storedFieldLoader = profiler.storedFields(StoredFieldLoader.fromSpec(storedFieldsSpec)); IdLoader idLoader = context.newIdLoader(); @@ -164,7 +168,7 @@ protected SearchHit nextDoc(int doc) throws IOException { leafIdLoader ); sourceProvider.source = hit.source(); - fieldLookupProvider.storedFields = hit.loadedFields(); + fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); for (FetchSubPhaseProcessor processor : processors) { processor.process(hit); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProvider.java b/server/src/main/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProvider.java index 31cd74c878a0f..b335ce4aa2800 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProvider.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProvider.java @@ -9,12 +9,16 @@ package org.elasticsearch.search.fetch; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.search.lookup.LeafFieldLookupProvider; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Supplier; /** @@ -26,15 +30,22 @@ */ class PreloadedFieldLookupProvider implements LeafFieldLookupProvider { - Map> storedFields; - LeafFieldLookupProvider backUpLoader; - Supplier loaderSupplier; + private final SetOnce> preloadedStoredFieldNames = new SetOnce<>(); + private Map> preloadedStoredFieldValues; + private String id; + private LeafFieldLookupProvider backUpLoader; + private Supplier loaderSupplier; @Override public void populateFieldLookup(FieldLookup fieldLookup, int doc) throws IOException { String field = fieldLookup.fieldType().name(); - if (storedFields.containsKey(field)) { - fieldLookup.setValues(storedFields.get(field)); + + if (field.equals(IdFieldMapper.NAME)) { + fieldLookup.setValues(Collections.singletonList(id)); + return; + } + if (preloadedStoredFieldNames.get().contains(field)) { + fieldLookup.setValues(preloadedStoredFieldValues.get(field)); return; } // stored field not preloaded, go and get it directly @@ -44,8 +55,26 @@ public void populateFieldLookup(FieldLookup fieldLookup, int doc) throws IOExcep backUpLoader.populateFieldLookup(fieldLookup, doc); } + void setPreloadedStoredFieldNames(Set preloadedStoredFieldNames) { + this.preloadedStoredFieldNames.set(preloadedStoredFieldNames); + } + + void setPreloadedStoredFieldValues(String id, Map> preloadedStoredFieldValues) { + assert preloadedStoredFieldNames.get().containsAll(preloadedStoredFieldValues.keySet()) + : "Provided stored field that was not expected to be preloaded? 
" + + preloadedStoredFieldValues.keySet() + + " - " + + preloadedStoredFieldNames; + this.preloadedStoredFieldValues = preloadedStoredFieldValues; + this.id = id; + } + void setNextReader(LeafReaderContext ctx) { backUpLoader = null; loaderSupplier = () -> LeafFieldLookupProvider.fromStoredFields().apply(ctx); } + + LeafFieldLookupProvider getBackUpLoader() { + return backUpLoader; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index 882eb1cf9c75b..287c47505bf3a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -40,6 +40,7 @@ public final class FetchFieldsPhase implements FetchSubPhase { private static final List DEFAULT_METADATA_FIELDS = List.of( new FieldAndFormat(IgnoredFieldMapper.NAME, null), new FieldAndFormat(RoutingFieldMapper.NAME, null), + // will only be fetched when mapped (older archived indices) new FieldAndFormat(LegacyTypeFieldMapper.NAME, null) ); @@ -95,9 +96,9 @@ public void setNextReader(LeafReaderContext readerContext) { @Override public StoredFieldsSpec storedFieldsSpec() { if (fieldFetcher != null) { - return fieldFetcher.storedFieldsSpec(); + return metadataFieldFetcher.storedFieldsSpec().merge(fieldFetcher.storedFieldsSpec()); } - return StoredFieldsSpec.NO_REQUIREMENTS; + return metadataFieldFetcher.storedFieldsSpec(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index fa4eb8f21f78c..97ff1862c7852 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -31,7 +31,9 @@ public MappedFieldType fieldType() { */ public void setValues(List values) { assert valuesLoaded == false : "Call clear() before calling setValues()"; - values.stream().map(fieldType::valueForDisplay).forEach(this.values::add); + if (values != null) { + values.stream().map(fieldType::valueForDisplay).forEach(this.values::add); + } this.valuesLoaded = true; } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProviderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProviderTests.java index 85d9c32a1ee5b..13cdb01156f05 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProviderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/PreloadedFieldLookupProviderTests.java @@ -13,11 +13,13 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; @@ -30,7 +32,16 @@ public class PreloadedFieldLookupProviderTests extends ESTestCase { public void testFallback() throws IOException { PreloadedFieldLookupProvider lookup = new PreloadedFieldLookupProvider(); - lookup.storedFields = Map.of("foo", List.of("bar")); + lookup.setPreloadedStoredFieldNames(Collections.singleton("foo")); + 
lookup.setPreloadedStoredFieldValues("id", Map.of("foo", List.of("bar"))); + + MappedFieldType idFieldType = mock(MappedFieldType.class); + when(idFieldType.name()).thenReturn(IdFieldMapper.NAME); + when(idFieldType.valueForDisplay(any())).then(invocation -> (invocation.getArguments()[0])); + FieldLookup idFieldLookup = new FieldLookup(idFieldType); + lookup.populateFieldLookup(idFieldLookup, 0); + assertEquals("id", idFieldLookup.getValue()); + assertNull(lookup.getBackUpLoader()); // fallback didn't get used because 'foo' is in the list MappedFieldType fieldType = mock(MappedFieldType.class); when(fieldType.name()).thenReturn("foo"); @@ -39,7 +50,7 @@ public void testFallback() throws IOException { lookup.populateFieldLookup(fieldLookup, 0); assertEquals("BAR", fieldLookup.getValue()); - assertNull(lookup.backUpLoader); // fallback didn't get used because 'foo' is in the list + assertNull(lookup.getBackUpLoader()); // fallback didn't get used because 'foo' is in the list MappedFieldType unloadedFieldType = mock(MappedFieldType.class); when(unloadedFieldType.name()).thenReturn("unloaded"); @@ -56,5 +67,4 @@ public void testFallback() throws IOException { lookup.populateFieldLookup(unloadedFieldLookup, 0); assertEquals("VALUE", unloadedFieldLookup.getValue()); } - } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index 39e73837c83ea..3a7460c05ca87 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.mapper.DocValueFetcher; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; +import org.elasticsearch.index.mapper.StoredValueFetcher; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; @@ -26,6 +27,9 @@ import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; +import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.StoredFieldsSpec; +import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.test.ESTestCase; @@ -35,6 +39,7 @@ import java.util.Set; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -90,6 +95,58 @@ public void testDocValueFetcher() throws IOException { reader.close(); dir.close(); + } + + public void testStoredFieldsSpec() { + StoredFieldsContext storedFieldsContext = StoredFieldsContext.fromList(List.of("stored", "_metadata")); + FetchFieldsContext ffc = new FetchFieldsContext(List.of(new FieldAndFormat("field", null))); + + SearchLookup searchLookup = mock(SearchLookup.class); + + SearchExecutionContext sec = mock(SearchExecutionContext.class); + when(sec.isMetadataField(any())).then(invocation -> invocation.getArguments()[0].toString().startsWith("_")); + + MappedFieldType routingFt = mock(MappedFieldType.class); + when(routingFt.valueFetcher(any(), any())).thenReturn(new StoredValueFetcher(searchLookup, "_routing")); + 
when(sec.getFieldType(eq("_routing"))).thenReturn(routingFt); + + // this would normally not be mapped -> getMatchingFieldsNames would not resolve it (unless for older archive indices) + MappedFieldType typeFt = mock(MappedFieldType.class); + when(typeFt.valueFetcher(any(), any())).thenReturn(new StoredValueFetcher(searchLookup, "_type")); + when(sec.getFieldType(eq("_type"))).thenReturn(typeFt); + + MappedFieldType ignoredFt = mock(MappedFieldType.class); + when(ignoredFt.valueFetcher(any(), any())).thenReturn(new StoredValueFetcher(searchLookup, "_ignored")); + when(sec.getFieldType(eq("_ignored"))).thenReturn(ignoredFt); + + // Ideally we would test that explicitly requested stored fields are included in stored fields spec, but isStored is final hence it + // can't be mocked. In reality, _metadata would be included but stored would not. + MappedFieldType storedFt = mock(MappedFieldType.class); + when(sec.getFieldType(eq("stored"))).thenReturn(storedFt); + MappedFieldType metadataFt = mock(MappedFieldType.class); + when(sec.getFieldType(eq("_metadata"))).thenReturn(metadataFt); + + MappedFieldType fieldType = mock(MappedFieldType.class); + when(fieldType.valueFetcher(any(), any())).thenReturn( + new DocValueFetcher( + DocValueFormat.RAW, + new SortedNumericIndexFieldData("field", IndexNumericFieldData.NumericType.LONG, CoreValuesSourceType.NUMERIC, null, false) + ) + ); + when(sec.getFieldType(eq("field"))).thenReturn(fieldType); + when(sec.getMatchingFieldNames(any())).then(invocation -> Set.of(invocation.getArguments()[0])); + when(sec.nestedLookup()).thenReturn(NestedLookup.EMPTY); + FetchContext fetchContext = mock(FetchContext.class); + when(fetchContext.fetchFieldsContext()).thenReturn(ffc); + when(fetchContext.storedFieldsContext()).thenReturn(storedFieldsContext); + when(fetchContext.getSearchExecutionContext()).thenReturn(sec); + FetchFieldsPhase fetchFieldsPhase = new FetchFieldsPhase(); + FetchSubPhaseProcessor processor = fetchFieldsPhase.getProcessor(fetchContext); + StoredFieldsSpec storedFieldsSpec = processor.storedFieldsSpec(); + assertEquals(3, storedFieldsSpec.requiredStoredFields().size()); + assertTrue(storedFieldsSpec.requiredStoredFields().contains("_routing")); + assertTrue(storedFieldsSpec.requiredStoredFields().contains("_ignored")); + assertTrue(storedFieldsSpec.requiredStoredFields().contains("_type")); } } From 19db4903d10689b736da8a60d96530424c1978d7 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 17 Apr 2024 20:22:53 +0200 Subject: [PATCH 084/130] Update skip for profile yaml tests following #107551 --- .../rest-api-spec/test/search/370_profile.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dda3d14a5ae1d..817c62dbdd12d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -22,8 +22,8 @@ setup: --- fetch fields: - skip: - version: ' - 8.13.99' - reason: fetch fields and stored_fields using ValueFetcher + version: ' - 8.14.99' + reason: _ignored is returned only from 8.15 on - do: search: @@ -57,8 +57,8 @@ fetch fields: --- fetch source: - skip: - version: ' - 8.13.99' - reason: fetch fields and stored_fields using ValueFetcher + version: ' - 8.14.99' + reason: _ignored is returned only from 8.15 on - do: search: 
@@ -88,8 +88,8 @@ fetch source: --- fetch nested source: - skip: - version: ' - 8.13.99' - reason: fetch fields and stored_fields using ValueFetcher + version: ' - 8.14.99' + reason: _ignored is returned only from 8.15 on - do: indices.create: From 732c7c4c30e8ac068d267a393315695e5d7573cb Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 17 Apr 2024 21:36:26 +0300 Subject: [PATCH 085/130] [DSL] Remove REST APIs for global retention (#107565) --- docs/changelog/105682.yaml | 20 -- .../data-streams/data-stream-apis.asciidoc | 12 - .../apis/delete-global-retention.asciidoc | 121 ---------- .../apis/get-global-retention.asciidoc | 90 -------- .../lifecycle/apis/get-lifecycle.asciidoc | 8 +- .../apis/put-global-retention.asciidoc | 131 ----------- .../data-streams/lifecycle/index.asciidoc | 12 +- ...rial-manage-data-stream-retention.asciidoc | 183 --------------- ...orial-manage-existing-data-stream.asciidoc | 4 +- .../tutorial-manage-new-data-stream.asciidoc | 17 +- ...grate-data-stream-from-ilm-to-dsl.asciidoc | 16 +- .../DataStreamGlobalRetentionIT.java | 147 ------------ ...treamGlobalRetentionPermissionsRestIT.java | 213 ------------------ .../datastreams/DataStreamsPlugin.java | 6 - .../ExplainDataStreamLifecycleAction.java | 7 +- .../action/GetDataStreamLifecycleAction.java | 27 ++- ...DeleteDataStreamGlobalRetentionAction.java | 49 ---- ...estGetDataStreamGlobalRetentionAction.java | 47 ---- ...estPutDataStreamGlobalRetentionAction.java | 53 ----- ...plainDataStreamLifecycleResponseTests.java | 18 -- .../lifecycle/10_explain_lifecycle.yml | 16 +- .../190_create_data_stream_with_lifecycle.yml | 6 +- .../test/data_stream/lifecycle/20_basic.yml | 15 +- .../data_stream/lifecycle/30_not_found.yml | 23 +- .../lifecycle/40_global_retention.yml | 139 ------------ .../data_streams.delete_global_retention.json | 35 --- .../data_streams.get_global_retention.json | 29 --- .../data_streams.put_global_retention.json | 39 ---- .../cluster.component_template/10_basic.yml | 12 +- .../indices.get_index_template/10_basic.yml | 14 +- .../10_basic.yml | 6 +- .../indices.simulate_template/10_basic.yml | 30 ++- .../get/GetComponentTemplateAction.java | 4 +- .../get/GetComposableIndexTemplateAction.java | 4 +- .../post/SimulateIndexTemplateResponse.java | 4 +- .../datastreams/GetDataStreamAction.java | 4 +- .../ExplainIndexDataStreamLifecycle.java | 6 +- .../GetComponentTemplateResponseTests.java | 4 - 38 files changed, 79 insertions(+), 1492 deletions(-) delete mode 100644 docs/changelog/105682.yaml delete mode 100644 docs/reference/data-streams/lifecycle/apis/delete-global-retention.asciidoc delete mode 100644 docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc delete mode 100644 docs/reference/data-streams/lifecycle/apis/put-global-retention.asciidoc delete mode 100644 docs/reference/data-streams/lifecycle/tutorial-manage-data-stream-retention.asciidoc delete mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionIT.java delete mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionPermissionsRestIT.java delete mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamGlobalRetentionAction.java delete mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamGlobalRetentionAction.java delete mode 100644 
modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamGlobalRetentionAction.java delete mode 100644 modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.delete_global_retention.json delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.get_global_retention.json delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.put_global_retention.json diff --git a/docs/changelog/105682.yaml b/docs/changelog/105682.yaml deleted file mode 100644 index f1713357ace80..0000000000000 --- a/docs/changelog/105682.yaml +++ /dev/null @@ -1,20 +0,0 @@ -pr: 105682 -summary: Introduce global retention in data stream lifecycle. -area: Data streams -type: feature -issues: - - 106169 -highlight: - title: Add global retention in data stream lifecycle - body: |- - Data stream lifecycle now supports configuring retention on a cluster level, namely global retention. Global retention - allows us to configure two different retentions: - - - `default_retention` is applied to all data streams managed by the data stream lifecycle that do not have retention - defined on the data stream level. - - `max_retention` is applied to all data streams managed by the data stream lifecycle and it allows any data stream - data to be deleted after the `max_retention` has passed. - - Furthermore, we introduce the term `effective_retention` which is the retention applied at a certain moment to a data - stream considering all the available retention configurations. - notable: true \ No newline at end of file diff --git a/docs/reference/data-streams/data-stream-apis.asciidoc b/docs/reference/data-streams/data-stream-apis.asciidoc index d525f0d8a7885..c13703ab2a6ee 100644 --- a/docs/reference/data-streams/data-stream-apis.asciidoc +++ b/docs/reference/data-streams/data-stream-apis.asciidoc @@ -27,12 +27,6 @@ preview:[] preview:[] * <> preview:[] -* <> -preview:[] -* <> -preview:[] -* <> -preview:[] The following API is available for <>: @@ -65,10 +59,4 @@ include::{es-ref-dir}/data-streams/lifecycle/apis/explain-lifecycle.asciidoc[] include::{es-ref-dir}/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc[] -include::{es-ref-dir}/data-streams/lifecycle/apis/put-global-retention.asciidoc[] - -include::{es-ref-dir}/data-streams/lifecycle/apis/get-global-retention.asciidoc[] - -include::{es-ref-dir}/data-streams/lifecycle/apis/delete-global-retention.asciidoc[] - include::{es-ref-dir}/indices/downsample-data-stream.asciidoc[] diff --git a/docs/reference/data-streams/lifecycle/apis/delete-global-retention.asciidoc b/docs/reference/data-streams/lifecycle/apis/delete-global-retention.asciidoc deleted file mode 100644 index 5b211eaf09e13..0000000000000 --- a/docs/reference/data-streams/lifecycle/apis/delete-global-retention.asciidoc +++ /dev/null @@ -1,121 +0,0 @@ -[[data-streams-delete-global-retention]] -=== Delete the global retention of data streams -++++ -Delete Data Stream Global Retention -++++ - -preview::[] - -Deletes the global retention configuration that applies on every data stream managed by <>. - -[[delete-global-retention-api-prereqs]] -==== {api-prereq-title} - -** If the {es} {security-features} are enabled, you must have the `manage_data_stream_global_retention` <> to use this API. 
-
-[[data-streams-delete-global-retention-request]]
-==== {api-request-title}
-
-`DELETE _data_stream/_global_retention`
-
-[[data-streams-delete-global-retention-desc]]
-==== {api-description-title}
-
-Deletes the global retention configuration that is applied on data streams managed by data stream lifecycle.
-
-[role="child_attributes"]
-[[delete-global-retention-api-query-parms]]
-==== {api-query-parms-title}
-
-`dry_run`::
-(Boolean) Signals that the request should determine the effect of the removal of the existing configuration without
-updating the global retention. The default value is `false`, which means the removal will happen.
-
-[[delete-global-retention-api-response-body]]
-==== {api-response-body-title}
-
-`acknowledged`::
-(boolean)
-True, if the global retention has been removed. False, if it fails or if it was a dry run.
-
-`dry_run`::
-(boolean)
-True, if this was a dry run, false otherwise.
-
-`affected_data_streams`::
-(array of objects)
-Contains information about the data streams affected by the change.
-+
-.Properties of objects in `affected_data_streams`
-[%collapsible%open]
-====
-`name`::
-(string)
-Name of the data stream.
-`previous_effective_retention`::
-(string)
-The retention that was effective before the change of this request. `infinite` if there was no retention applicable.
-`new_effective_retention`::
-(string)
-The retention that is or would be effective after this request. `infinite` if there is no retention applicable.
-====
-
-[[data-streams-delete-global-retention-example]]
-==== {api-examples-title}
-
-////
-
-[source,console]
---------------------------------------------------
-PUT _data_stream/_global_retention
-{
-  "default_retention": "7d",
-  "max_retention": "90d"
-}
-
-PUT /_index_template/template
-{
-  "index_patterns": ["my-data-stream*"],
-  "template": {
-    "lifecycle": {}
-  },
-  "data_stream": { }
-}
-
-PUT /_data_stream/my-data-stream
---------------------------------------------------
-// TESTSETUP
-////
-
-////
-[source,console]
-----
-DELETE /_data_stream/my-data-stream*
-DELETE /_index_template/template
-DELETE /_data_stream/_global_retention
-----
-// TEARDOWN
-////
-
-Let's update the global retention:
-[source,console]
---------------------------------------------------
-DELETE _data_stream/_global_retention
---------------------------------------------------
-
-The response will look like the following:
-
-[source,console-result]
---------------------------------------------------
-{
-  "acknowledged": true,
-  "dry_run": false,
-  "affected_data_streams": [
-    {
-      "name": "my-data-stream",
-      "previous_effective_retention": "7d",
-      "new_effective_retention": "infinite"
-    }
-  ]
-}
---------------------------------------------------
diff --git a/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc
deleted file mode 100644
index 0997c2d84ece2..0000000000000
--- a/docs/reference/data-streams/lifecycle/apis/get-global-retention.asciidoc
+++ /dev/null
@@ -1,90 +0,0 @@
-[[data-streams-get-global-retention]]
-=== Get the global retention of data streams
-++++
-Get Data Stream Global Retention
-++++
-
-preview::[]
-
-Gets the global retention that applies on every data stream managed by <>.
-
-[[get-global-retention-api-prereqs]]
-==== {api-prereq-title}
-
-** If the {es} {security-features} are enabled, you must have the `monitor_data_stream_global_retention` or
-`manage_data_stream_global_retention` <> to use this API.
-
-[[data-streams-get-global-retention-request]]
-==== {api-request-title}
-
-`GET _data_stream/_global_retention`
-
-[[data-streams-get-global-retention-desc]]
-==== {api-description-title}
-
-Gets the global retention configuration that is applied on data streams managed by data stream lifecycle.
-
-[role="child_attributes"]
-[[get-global-retention-api-query-parms]]
-==== {api-query-parms-title}
-
-include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=local]
-
-include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=master-timeout]
-
-[[get-global-retention-api-response-body]]
-==== {api-response-body-title}
-
-`default_retention`::
-(Optional, string)
-The default retention that will apply to any data stream managed by data stream lifecycle that does not have a retention
-defined on the data stream level.
-
-`max_retention`::
-(Optional, string)
-The max retention that will apply to all data streams managed by data stream lifecycle. The max retention will override the
-retention of a data stream whose retention exceeds the max retention.
-
-
-[[data-streams-get-global-retention-example]]
-==== {api-examples-title}
-
-////
-
-[source,console]
---------------------------------------------------
-PUT _data_stream/_global_retention
-{
-  "default_retention": "7d",
-  "max_retention": "90d"
-}
---------------------------------------------------
-// TESTSETUP
-
-[source,console]
---------------------------------------------------
-DELETE _data_stream/_global_retention
---------------------------------------------------
-// TEARDOWN
-
-////
-
-Let's retrieve the global retention:
-
-[source,console]
---------------------------------------------------
-GET _data_stream/_global_retention
---------------------------------------------------
-
-The response will look like the following:
-
-[source,console-result]
---------------------------------------------------
-{
-  "default_retention": "7d", <1>
-  "max_retention": "90d" <2>
-}
---------------------------------------------------
-<1> 7 days retention will be applied to any data stream that does not have retention set in its lifecycle.
-<2> 90 days retention will be applied to all data streams whose retention exceeds 90 days; this also
-applies to data streams that have infinite retention.
\ No newline at end of file
diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc
index 83955417abd0b..0d80a31bd4f5a 100644
--- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc
+++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc
@@ -130,18 +130,14 @@ The response will look like the following:
       "name": "my-data-stream-1",
       "lifecycle": {
         "enabled": true,
-        "data_retention": "7d",
-        "effective_retention": "7d",
-        "retention_determined_by": "data_stream_configuration"
+        "data_retention": "7d"
       }
     },
     {
       "name": "my-data-stream-2",
       "lifecycle": {
         "enabled": true,
-        "data_retention": "7d",
-        "effective_retention": "7d",
-        "retention_determined_by": "data_stream_configuration"
+        "data_retention": "7d"
       }
     }
   ]
diff --git a/docs/reference/data-streams/lifecycle/apis/put-global-retention.asciidoc b/docs/reference/data-streams/lifecycle/apis/put-global-retention.asciidoc
deleted file mode 100644
index c9bc804c13408..0000000000000
--- a/docs/reference/data-streams/lifecycle/apis/put-global-retention.asciidoc
+++ /dev/null
@@ -1,131 +0,0 @@
-[[data-streams-put-global-retention]]
-=== Update the global retention of data streams
-++++
-Update Data Stream Global Retention
-++++
-
-preview::[]
-
-Updates the global retention configuration that applies on every data stream managed by <>.
-
-[[put-global-retention-api-prereqs]]
-==== {api-prereq-title}
-
-** If the {es} {security-features} are enabled, you must have the `manage_data_stream_global_retention` <> to use this API.
-
-[[data-streams-put-global-retention-request]]
-==== {api-request-title}
-
-`PUT _data_stream/_global_retention`
-
-[[data-streams-put-global-retention-desc]]
-==== {api-description-title}
-
-Updates the global retention configuration that is applied on data streams managed by data stream lifecycle.
-
-[role="child_attributes"]
-[[put-global-retention-api-query-parms]]
-==== {api-query-parms-title}
-
-`dry_run`::
-(Boolean) Signals that the request should determine the effect of the provided configuration without updating the
-global retention settings. The default value is `false`, which means the configuration provided will be applied.
-
-[[put-global-retention-api-request-body]]
-==== {api-request-body-title}
-
-`default_retention`::
-(Optional, string)
-The default retention that will apply to any data stream managed by data stream lifecycle that does not have a retention
-defined on the data stream level.
-
-`max_retention`::
-(Optional, string)
-The max retention that will apply to all data streams managed by data stream lifecycle. The max retention will override the
-retention of a data stream whose retention exceeds the max retention.
-
-[[put-global-retention-api-response-body]]
-==== {api-response-body-title}
-
-`acknowledged`::
-(boolean)
-True, if the global retention has been updated to the provided values. False, if it fails or if it was a dry run.
-
-`dry_run`::
-(boolean)
-True, if this was a dry run, false otherwise.
-
-`affected_data_streams`::
-(array of objects)
-Contains information about the data streams affected by the change.
-+
-.Properties of objects in `affected_data_streams`
-[%collapsible%open]
-====
-`name`::
-(string)
-Name of the data stream.
-`previous_effective_retention`::
-(string)
-The retention that was effective before the change of this request. `infinite` if there was no retention applicable.
-`new_effective_retention`:: -(string) -The retention that is or would be effective after this request. `infinite` if there is no retention applicable. -==== - -[[data-streams-put-global-retention-example]] -==== {api-examples-title} - -//// -[source,console] ----- -PUT /_index_template/template -{ - "index_patterns": ["my-data-stream*"], - "template": { - "lifecycle": {} - }, - "data_stream": { } -} - -PUT /_data_stream/my-data-stream ----- -// TESTSETUP -//// - -//// -[source,console] ----- -DELETE /_data_stream/my-data-stream* -DELETE /_index_template/template -DELETE /_data_stream/_global_retention ----- -// TEARDOWN -//// - -Let's update the global retention: -[source,console] --------------------------------------------------- -PUT _data_stream/_global_retention -{ - "default_retention": "7d", - "max_retention": "90d" -} --------------------------------------------------- - -The response will look like the following: - -[source,console-result] --------------------------------------------------- -{ - "acknowledged": true, - "dry_run": false, - "affected_data_streams": [ - { - "name": "my-data-stream", - "previous_effective_retention": "infinite", - "new_effective_retention": "7d" - } - ] -} --------------------------------------------------- diff --git a/docs/reference/data-streams/lifecycle/index.asciidoc b/docs/reference/data-streams/lifecycle/index.asciidoc index dff3dae22f8ef..bf861df7c80d4 100644 --- a/docs/reference/data-streams/lifecycle/index.asciidoc +++ b/docs/reference/data-streams/lifecycle/index.asciidoc @@ -16,8 +16,7 @@ To achieve that, it supports: * Automatic <>, which chunks your incoming data in smaller pieces to facilitate better performance and backwards incompatible mapping changes. * Configurable retention, which allows you to configure the time period for which your data is guaranteed to be stored. -{es} is allowed at a later time to delete data older than this time period. Retention can be configured on the data stream level -or on a global level. Read more about the different options in this <>. +{es} is allowed at a later time to delete data older than this time period. A data stream lifecycle also supports downsampling the data stream backing indices. See <> for @@ -43,10 +42,9 @@ data that is most likely to keep being queried. 4. If <> is configured it will execute all the configured downsampling rounds. 5. Applies retention to the remaining backing indices. This means deleting the backing indices whose -`generation_time` is longer than the effective retention period (read more about the -<>). The `generation_time` is only applicable to rolled -over backing indices and it is either the time since the backing index got rolled over, or the time optionally configured -in the <> setting. +`generation_time` is longer than the configured retention period. The `generation_time` is only applicable to rolled over backing +indices and it is either the time since the backing index got rolled over, or the time optionally configured in the +<> setting. IMPORTANT: We use the `generation_time` instead of the creation time because this ensures that all data in the backing index have passed the retention period. 
As a result, the retention period is not the exact time data gets deleted, but
 the minimum time data will be stored.
 
@@ -79,6 +77,4 @@ include::tutorial-manage-new-data-stream.asciidoc[]
 
 include::tutorial-manage-existing-data-stream.asciidoc[]
 
-include::tutorial-manage-data-stream-retention.asciidoc[]
-
 include::tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc[]
diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-data-stream-retention.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-data-stream-retention.asciidoc
deleted file mode 100644
index 7b84cd238ce49..0000000000000
--- a/docs/reference/data-streams/lifecycle/tutorial-manage-data-stream-retention.asciidoc
+++ /dev/null
@@ -1,183 +0,0 @@
-[role="xpack"]
-[[tutorial-manage-data-stream-retention]]
-=== Tutorial: Data stream retention
-
-preview::[]
-
-In this tutorial, we are going to go over data stream lifecycle retention: we will define it, explain how it can be configured,
-and how it is applied. Keep in mind, the following options apply only to data streams that are managed by the data stream lifecycle.
-
-. <>
-. <>
-. <>
-. <>
-
-You can verify if a data stream is managed by the data stream lifecycle via the <>:
-
-////
-[source,console]
-----
-PUT /_index_template/template
-{
-  "index_patterns": ["my-data-stream*"],
-  "template": {
-    "lifecycle": {}
-  },
-  "data_stream": { }
-}
-
-PUT /_data_stream/my-data-stream
-----
-// TESTSETUP
-////
-
-////
-[source,console]
-----
-DELETE /_data_stream/my-data-stream*
-DELETE /_index_template/template
-DELETE /_data_stream/_global_retention
-----
-// TEARDOWN
-////
-
-[source,console]
---------------------------------------------------
-GET _data_stream/my-data-stream/_lifecycle
---------------------------------------------------
-
-The result should look like this:
-
-[source,console-result]
---------------------------------------------------
-{
-  "data_streams": [
-    {
-      "name": "my-data-stream", <1>
-      "lifecycle": {
-        "enabled": true <2>
-      }
-    }
-  ]
-}
---------------------------------------------------
-// TESTRESPONSE[skip:the result is for illustrating purposes only]
-<1> The name of your data stream.
-<2> Ensure that the lifecycle is enabled, meaning this should be `true`.
-
-[discrete]
-[[what-is-retention]]
-==== What is data stream retention?
-
-We define retention as the least amount of time the data of a data stream are going to be kept in {es}. After this time period
-has passed, {es} is allowed to remove these data to free up space and/or manage costs.
-
-NOTE: Retention does not define the period after which the data will be removed, but the minimum time period they will be kept.
-
-We define 4 different types of retention:
-
-* The data stream retention, or `data_retention`, which is the retention configured on the data stream level. It can be
-set via an <> for future data streams or via the <> for an existing data stream. When the data stream retention is not set, it implies that the data
-need to be kept forever.
-* The global default retention, or `default_retention`, which is a retention configured on a cluster level and will be
-applied to all data streams managed by data stream lifecycle that do not have `data_retention` configured. Effectively,
-it ensures that there will be no data streams keeping their data forever. This can be set via the
-<>.
-* The global max retention, or `max_retention`, which is a retention configured on a cluster level and will be applied to
-all data streams managed by data stream lifecycle.
Effectively, it ensures that there will be no data streams whose retention
-will exceed this time period. This can be set via the <>.
-* The effective retention, or `effective_retention`, which is the retention applied to a data stream at a given moment.
-Effective retention cannot be set directly; it is derived by taking into account all the configured retention options listed
-above and is calculated as described <>.
-
-[discrete]
-[[retention-configuration]]
-==== How to configure retention?
-
-- By setting the `data_retention` on the data stream level. This retention can be configured in two ways:
-+
--- For new data streams, it can be defined in the index template that would be applied during the data stream's creation.
-You can use the <>, for example:
-+
-[source,console]
---------------------------------------------------
-PUT _index_template/template
-{
-  "index_patterns": ["my-data-stream*"],
-  "data_stream": { },
-  "priority": 500,
-  "template": {
-    "lifecycle": {
-      "data_retention": "7d"
-    }
-  },
-  "_meta": {
-    "description": "Template with data stream lifecycle"
-  }
-}
---------------------------------------------------
--- For an existing data stream, it can be set via the <>.
-+
-[source,console]
-----
-PUT _data_stream/my-data-stream/_lifecycle
-{
-  "data_retention": "30d" <1>
-}
-----
-// TEST[continued]
-<1> The retention period of this data stream is set to 30 days.
-
-- By setting the global retention via the `default_retention` and `max_retention` that are set on a cluster level. You
-can set them via the <>. For example:
-+
-[source,console]
---------------------------------------------------
-PUT _data_stream/_global_retention
-{
-  "default_retention": "7d",
-  "max_retention": "90d"
-}
---------------------------------------------------
-// TEST[continued]
-
-[discrete]
-[[effective-retention-calculation]]
-==== How is the effective retention calculated?
-The effective retention is calculated in the following way:
-
-- The `effective_retention` is the `default_retention`, when `default_retention` is defined and the data stream does not
-have `data_retention`.
-- The `effective_retention` is the `data_retention`, when `data_retention` is defined and, if `max_retention` is also
-defined, `data_retention` does not exceed the `max_retention`.
-- The `effective_retention` is the `max_retention`, when `max_retention` is defined, and the data stream has either no
-`data_retention` or its `data_retention` is greater than the `max_retention`.
-
-The above is demonstrated in the examples below:
-
-|===
-|`default_retention` |`max_retention` |`data_retention` |`effective_retention` |Retention determined by
-
-|Not set |Not set |Not set |Infinite |N/A
-|Not relevant |12 months |**30 days** |30 days |`data_retention`
-|Not relevant |Not set |**30 days** |30 days |`data_retention`
-|**30 days** |12 months |Not set |30 days |`default_retention`
-|**30 days** |30 days |Not set |30 days |`default_retention`
-|Not relevant |**30 days** |12 months |30 days |`max_retention`
-|Not set |**30 days** |Not set |30 days |`max_retention`
-|===
-
-[discrete]
-[[effective-retention-application]]
-==== How is the effective retention applied?
-
-Retention is applied to the remaining backing indices of a data stream as the last step of
-<>. Data stream lifecycle will retrieve the backing indices
-whose `generation_time` is longer than the effective retention period and delete them.
The `generation_time` is only -applicable to rolled over backing indices and it is either the time since the backing index got rolled over, or the time -optionally configured in the <> setting. - -IMPORTANT: We use the `generation_time` instead of the creation time because this ensures that all data in the backing -index have passed the retention period. As a result, the retention period is not the exact time data get deleted, but -the minimum time data will be stored. \ No newline at end of file diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc index 7be2b30b9b83c..5670faaade3ce 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc @@ -74,9 +74,7 @@ The response will look like: "generation_time": "6.84s", <9> "lifecycle": { "enabled": true, - "data_retention": "30d", - "effective_retention": "30d" <10> - "retention_determined_by": "data_stream_configuration" + "data_retention": "30d" <10> } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc index ecfdc16884082..6f1d81ab6ead2 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc @@ -93,12 +93,10 @@ The result will look like this: { "data_streams": [ { - "name": "my-data-stream", <1> + "name": "my-data-stream",<1> "lifecycle": { - "enabled": true, <2> - "data_retention": "7d", <3> - "effective_retention": "7d", <4> - "retention_determined_by": "data_stream_configuration" <5> + "enabled": true, <2> + "data_retention": "7d" <3> } } ] @@ -106,11 +104,8 @@ The result will look like this: -------------------------------------------------- <1> The name of your data stream. <2> Shows if the data stream lifecycle is enabled for this data stream. -<3> The desired retention period of the data indexed in this data stream, this means that if there are no other limitations -the data for this data stream will be preserved for at least 7 days. -<4> The effective retention, this means that the data in this data stream will +<3> The retention period of the data indexed in this data stream, this means that the data in this data stream will be kept at least for 7 days. After that {es} can delete it at its own discretion. -<5> The configuration that determined the effective retention. 
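The effective-retention rules and table in the tutorial deleted above boil down to a small precedence function. The following is a minimal sketch of those rules in plain Java (using `java.time.Duration` and `null` to represent "not set"; the class and method names are illustrative, not the actual `DataStreamLifecycle` implementation):

[source,java]
----
import java.time.Duration;

// Illustrative sketch of the documented precedence: data_retention falls back to
// default_retention, and max_retention caps whatever results (including infinite).
public class EffectiveRetentionSketch {

    /** A null duration means "not set", i.e. infinite retention. */
    static Duration effectiveRetention(Duration dataRetention, Duration defaultRetention, Duration maxRetention) {
        // default_retention only applies when the data stream has no data_retention of its own.
        Duration effective = dataRetention != null ? dataRetention : defaultRetention;
        // max_retention overrides any longer (or infinite) effective retention.
        if (maxRetention != null && (effective == null || effective.compareTo(maxRetention) > 0)) {
            return maxRetention;
        }
        return effective;
    }

    public static void main(String[] args) {
        // Rows from the table in the deleted tutorial:
        System.out.println(effectiveRetention(null, null, null));                                // null (infinite)
        System.out.println(effectiveRetention(Duration.ofDays(30), null, Duration.ofDays(365))); // 30 days, data_retention
        System.out.println(effectiveRetention(null, Duration.ofDays(30), Duration.ofDays(365))); // 30 days, default_retention
        System.out.println(effectiveRetention(Duration.ofDays(365), null, Duration.ofDays(30))); // 30 days, max_retention
    }
}
----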
If you want to see more information about how the data stream lifecycle is applied on individual backing indices use the <>: @@ -133,9 +128,7 @@ The result will look like this: "time_since_index_creation": "1.6m", <3> "lifecycle": { <4> "enabled": true, - "data_retention": "7d", - "effective_retention": "7d", - "retention_determined_by": "data_stream_configuration" + "data_retention": "7d" } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 65eaf472890f4..3125c82120d8d 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -200,10 +200,10 @@ PUT _index_template/dsl-data-stream-template "template": { "settings": { "index.lifecycle.name": "pre-dsl-ilm-policy", - "index.lifecycle.prefer_ilm": false <1> + "index.lifecycle.prefer_ilm": false <1> }, - "lifecycle": { <2> - "data_retention": "7d" <3> + "lifecycle": { + "data_retention": "7d" <2> } } } @@ -215,8 +215,6 @@ PUT _index_template/dsl-data-stream-template precedence over data stream lifecycle. <2> We're configuring the data stream lifecycle so _new_ data streams will be managed by data stream lifecycle. -<3> The desired retention, meaning that this data stream should keep the data for at least 7 days, -if this retention is possible. We've now made sure that new data streams will be managed by data stream lifecycle. @@ -270,9 +268,7 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d", - "effective_retention": "7d", - "retention_determined_by": "data_stream_configuration" + "data_retention": "7d" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", <3> @@ -350,9 +346,7 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d", - "effective_retention": "7d", - "retention_determined_by": "data_stream_configuration" + "data_retention": "7d" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionIT.java deleted file mode 100644 index 557e70ba65e9b..0000000000000 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionIT.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.datastreams.lifecycle;
-
-import org.elasticsearch.client.Request;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.datastreams.DisabledSecurityDataStreamTestCase;
-import org.junit.After;
-import org.junit.Before;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-
-public class DataStreamGlobalRetentionIT extends DisabledSecurityDataStreamTestCase {
-
-    @Before
-    public void setup() throws IOException {
-        updateClusterSettings(
-            Settings.builder()
-                .put("data_streams.lifecycle.poll_interval", "1s")
-                .put("cluster.lifecycle.default.rollover", "min_docs=1,max_docs=1")
-                .build()
-        );
-        // Create a template with the default lifecycle
-        Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/1");
-        putComposableIndexTemplateRequest.setJsonEntity("""
-            {
-              "index_patterns": ["my-data-stream*"],
-              "data_stream": {},
-              "template": {
-                "lifecycle": {}
-              }
-            }
-            """);
-        assertOK(client().performRequest(putComposableIndexTemplateRequest));
-
-        // Create a data stream with one doc
-        Request createDocRequest = new Request("POST", "/my-data-stream/_doc?refresh=true");
-        createDocRequest.setJsonEntity("{ \"@timestamp\": \"2022-12-12\"}");
-        assertOK(client().performRequest(createDocRequest));
-    }
-
-    @After
-    public void cleanUp() throws IOException {
-        adminClient().performRequest(new Request("DELETE", "_data_stream/*"));
-    }
-
-    @SuppressWarnings("unchecked")
-    public void testDefaultRetention() throws Exception {
-        {
-            // Set global retention
-            Request request = new Request("PUT", "_data_stream/_global_retention");
-            request.setJsonEntity("""
-                {
-                  "default_retention": "10s"
-                }""");
-            assertAcknowledged(client().performRequest(request));
-        }
-
-        // Verify that the effective retention matches the default retention
-        {
-            Request request = new Request("GET", "/_data_stream/my-data-stream");
-            Response response = client().performRequest(request);
-            List dataStreams = (List) entityAsMap(response).get("data_streams");
-            assertThat(dataStreams.size(), is(1));
-            Map dataStream = (Map) dataStreams.get(0);
-            assertThat(dataStream.get("name"), is("my-data-stream"));
-            Map lifecycle = (Map) dataStream.get("lifecycle");
-            assertThat(lifecycle.get("effective_retention"), is("10s"));
-            assertThat(lifecycle.get("retention_determined_by"), is("default_global_retention"));
-            assertThat(lifecycle.get("data_retention"), nullValue());
-        }
-
-        // Verify that the first generation index was removed
-        assertBusy(() -> {
-            Response response = client().performRequest(new Request("GET", "/_data_stream/my-data-stream"));
-            Map dataStream = ((List<Map<String, Object>>) entityAsMap(response).get("data_streams")).get(0);
-            assertThat(dataStream.get("name"), is("my-data-stream"));
-            List backingIndices = (List) dataStream.get("indices");
-            assertThat(backingIndices.size(), is(1));
-            // 2 backing indices created + 1 for the deleted index
-            assertThat(dataStream.get("generation"), is(3));
-        }, 20, TimeUnit.SECONDS);
-    }
-
-    @SuppressWarnings("unchecked")
-    public void testMaxRetention() throws Exception {
-        {
-            // Set global retention
-            Request request = new Request("PUT", "_data_stream/_global_retention");
-            request.setJsonEntity("""
-                {
-                  "max_retention": "10s"
-                }""");
-            assertAcknowledged(client().performRequest(request));
-        }
-        boolean withDataStreamLevelRetention = randomBoolean();
-        if (withDataStreamLevelRetention) {
-            Request request = new Request("PUT", "_data_stream/my-data-stream/_lifecycle");
-            request.setJsonEntity("""
-                {
-                  "data_retention": "30d"
-                }""");
-            assertAcknowledged(client().performRequest(request));
-        }
-
-        // Verify that the effective retention matches the max retention
-        {
-            Request request = new Request("GET", "/_data_stream/my-data-stream");
-            Response response = client().performRequest(request);
-            List dataStreams = (List) entityAsMap(response).get("data_streams");
-            assertThat(dataStreams.size(), is(1));
-            Map dataStream = (Map) dataStreams.get(0);
-            assertThat(dataStream.get("name"), is("my-data-stream"));
-            Map lifecycle = (Map) dataStream.get("lifecycle");
-            assertThat(lifecycle.get("effective_retention"), is("10s"));
-            assertThat(lifecycle.get("retention_determined_by"), is("max_global_retention"));
-            if (withDataStreamLevelRetention) {
-                assertThat(lifecycle.get("data_retention"), is("30d"));
-            } else {
-                assertThat(lifecycle.get("data_retention"), nullValue());
-            }
-        }
-
-        // Verify that the first generation index was removed
-        assertBusy(() -> {
-            Response response = client().performRequest(new Request("GET", "/_data_stream/my-data-stream"));
-            Map dataStream = ((List<Map<String, Object>>) entityAsMap(response).get("data_streams")).get(0);
-            assertThat(dataStream.get("name"), is("my-data-stream"));
-            List backingIndices = (List) dataStream.get("indices");
-            assertThat(backingIndices.size(), is(1));
-            // 2 backing indices created + 1 for the deleted index
-            assertThat(dataStream.get("generation"), is(3));
-        }, 20, TimeUnit.SECONDS);
-    }
-}
diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionPermissionsRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionPermissionsRestIT.java
deleted file mode 100644
index e2e82b343fc5f..0000000000000
--- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamGlobalRetentionPermissionsRestIT.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ - -package org.elasticsearch.datastreams.lifecycle; - -import org.apache.http.HttpHost; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.cluster.util.resource.Resource; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.ClassRule; - -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -public class DataStreamGlobalRetentionPermissionsRestIT extends ESRestTestCase { - - private static final String PASSWORD = "secret-test-password"; - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .feature(FeatureFlag.FAILURE_STORE_ENABLED) - .setting("xpack.watcher.enabled", "false") - .setting("xpack.ml.enabled", "false") - .setting("xpack.security.enabled", "true") - .setting("xpack.security.transport.ssl.enabled", "false") - .setting("xpack.security.http.ssl.enabled", "false") - .user("test_admin", PASSWORD, "superuser", false) - .user("test_manage_global_retention", PASSWORD, "manage_data_stream_global_retention", false) - .user("test_monitor_global_retention", PASSWORD, "monitor_data_stream_global_retention", false) - .user("test_monitor", PASSWORD, "manage_data_stream_lifecycle", false) - .user("test_no_privilege", PASSWORD, "no_privilege", false) - .rolesFile(Resource.fromClasspath("roles.yml")) - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restClientSettings() { - // If this test is running in a test framework that handles its own authorization, we don't want to overwrite it. - if (super.restClientSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { - return super.restClientSettings(); - } else { - // Note: This user is assigned the role "manage_data_stream_lifecycle". That role is defined in roles.yml. - String token = basicAuthHeaderValue("test_data_stream_lifecycle", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - } - - @Override - protected Settings restAdminSettings() { - // If this test is running in a test framework that handles its own authorization, we don't want to overwrite it. 
- if (super.restClientSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { - return super.restClientSettings(); - } else { - // Note: We use the admin user because the other one is too unprivileged, so it breaks the initialization of the test - String token = basicAuthHeaderValue("test_admin", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - } - - private Settings restManageGlobalRetentionClientSettings() { - String token = basicAuthHeaderValue("test_manage_global_retention", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - private Settings restMonitorGlobalRetentionClientSettings() { - String token = basicAuthHeaderValue("test_monitor_global_retention", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - private Settings restOnlyManageLifecycleClientSettings() { - String token = basicAuthHeaderValue("test_monitor", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - private Settings restNoPrivilegeClientSettings() { - String token = basicAuthHeaderValue("test_no_privilege", new SecureString(PASSWORD.toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - public void testManageGlobalRetentionPrivileges() throws Exception { - try (var client = buildClient(restManageGlobalRetentionClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { - Request request = new Request("PUT", "_data_stream/_global_retention"); - request.setJsonEntity(""" - { - "default_retention": "1d", - "max_retention": "7d" - }"""); - assertAcknowledged(client.performRequest(request)); - Map response = entityAsMap(client.performRequest(new Request("GET", "/_data_stream/_global_retention"))); - assertThat(response.get("default_retention"), equalTo("1d")); - assertThat(response.get("max_retention"), equalTo("7d")); - assertAcknowledged(client.performRequest(new Request("DELETE", "/_data_stream/_global_retention"))); - } - } - - public void testMonitorGlobalRetentionPrivileges() throws Exception { - { - Request request = new Request("PUT", "_data_stream/_global_retention"); - request.setJsonEntity(""" - { - "default_retention": "1d", - "max_retention": "7d" - }"""); - assertAcknowledged(adminClient().performRequest(request)); - } - try (var client = buildClient(restMonitorGlobalRetentionClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { - Request request = new Request("PUT", "_data_stream/_global_retention"); - request.setJsonEntity(""" - { - "default_retention": "1d", - "max_retention": "7d" - }"""); - ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request)); - assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403)); - assertThat( - responseException.getMessage(), - containsString( - "action [cluster:admin/data_stream/global_retention/put] is unauthorized for user [test_monitor_global_retention]" - ) - ); - responseException = expectThrows( - ResponseException.class, - () -> client.performRequest(new Request("DELETE", "/_data_stream/_global_retention")) - ); - assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403)); - assertThat( - responseException.getMessage(), - 
containsString(
-                    "action [cluster:admin/data_stream/global_retention/delete] is unauthorized for user [test_monitor_global_retention]"
-                )
-            );
-            Map response = entityAsMap(client.performRequest(new Request("GET", "/_data_stream/_global_retention")));
-            assertThat(response.get("default_retention"), equalTo("1d"));
-            assertThat(response.get("max_retention"), equalTo("7d"));
-        }
-    }
-
-    public void testManageLifecyclePrivileges() throws Exception {
-        try (var client = buildClient(restOnlyManageLifecycleClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) {
-            Request request = new Request("PUT", "_data_stream/_global_retention");
-            request.setJsonEntity("""
-                {
-                  "default_retention": "1d",
-                  "max_retention": "7d"
-                }""");
-            ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request));
-            assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403));
-            assertThat(
-                responseException.getMessage(),
-                containsString("action [cluster:admin/data_stream/global_retention/put] is unauthorized for user [test_monitor]")
-            );
-            // This user has the monitor privilege, which includes monitor_data_stream_global_retention
-            Response response = client.performRequest(new Request("GET", "/_data_stream/_global_retention"));
-            assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
-        }
-    }
-
-    public void testNoPrivileges() throws Exception {
-        try (var client = buildClient(restNoPrivilegeClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) {
-            Request request = new Request("PUT", "_data_stream/_global_retention");
-            request.setJsonEntity("""
-                {
-                  "default_retention": "1d",
-                  "max_retention": "7d"
-                }""");
-            ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request));
-            assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403));
-            assertThat(
-                responseException.getMessage(),
-                containsString("action [cluster:admin/data_stream/global_retention/put] is unauthorized for user [test_no_privilege]")
-            );
-            responseException = expectThrows(
-                ResponseException.class,
-                () -> client.performRequest(new Request("DELETE", "/_data_stream/_global_retention"))
-            );
-            assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403));
-            assertThat(
-                responseException.getMessage(),
-                containsString("action [cluster:admin/data_stream/global_retention/delete] is unauthorized for user [test_no_privilege]")
-            );
-            responseException = expectThrows(
-                ResponseException.class,
-                () -> client.performRequest(new Request("GET", "/_data_stream/_global_retention"))
-            );
-            assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(403));
-            assertThat(
-                responseException.getMessage(),
-                containsString("action [cluster:monitor/data_stream/global_retention/get] is unauthorized for user [test_no_privilege]")
-            );
-        }
-    }
-}
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java
index 53e0bc287d3ec..c18da970ca26b 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java
@@ -58,12 +58,9 @@
 import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthIndicatorService;
 import
org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.datastreams.lifecycle.rest.RestDataStreamLifecycleStatsAction; -import org.elasticsearch.datastreams.lifecycle.rest.RestDeleteDataStreamGlobalRetentionAction; import org.elasticsearch.datastreams.lifecycle.rest.RestDeleteDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestExplainDataStreamLifecycleAction; -import org.elasticsearch.datastreams.lifecycle.rest.RestGetDataStreamGlobalRetentionAction; import org.elasticsearch.datastreams.lifecycle.rest.RestGetDataStreamLifecycleAction; -import org.elasticsearch.datastreams.lifecycle.rest.RestPutDataStreamGlobalRetentionAction; import org.elasticsearch.datastreams.lifecycle.rest.RestPutDataStreamLifecycleAction; import org.elasticsearch.datastreams.rest.RestCreateDataStreamAction; import org.elasticsearch.datastreams.rest.RestDataStreamsStatsAction; @@ -290,9 +287,6 @@ public List getRestHandlers( handlers.add(new RestDeleteDataStreamLifecycleAction()); handlers.add(new RestExplainDataStreamLifecycleAction()); handlers.add(new RestDataStreamLifecycleStatsAction()); - handlers.add(new RestPutDataStreamGlobalRetentionAction()); - handlers.add(new RestGetDataStreamGlobalRetentionAction()); - handlers.add(new RestDeleteDataStreamGlobalRetentionAction()); return handlers; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java index 5bfdf2d382005..7af70f98e8ce7 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -216,11 +215,7 @@ public Iterator toXContentChunked(ToXContent.Params outerP return builder; }), Iterators.map(indices.iterator(), explainIndexDataLifecycle -> (builder, params) -> { builder.field(explainIndexDataLifecycle.getIndex()); - ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( - DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, - params - ); - explainIndexDataLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + explainIndexDataLifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); return builder; }), Iterators.single((builder, params) -> { builder.endObject(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java index 79e1b71771559..0a5bda11f2a22 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java @@ -237,17 +237,22 @@ public Iterator toXContentChunked(ToXContent.Params outerParams) { builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); return builder; - }), Iterators.map(dataStreamLifecycles.iterator(), dataStreamLifecycle -> (builder, params) -> { - ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( - org.elasticsearch.cluster.metadata.DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, - params - ); - return dataStreamLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); - }), Iterators.single((builder, params) -> { - builder.endArray(); - builder.endObject(); - return builder; - })); + }), + Iterators.map( + dataStreamLifecycles.iterator(), + dataStreamLifecycle -> (builder, params) -> dataStreamLifecycle.toXContent( + builder, + params, + rolloverConfiguration, + globalRetention + ) + ), + Iterators.single((builder, params) -> { + builder.endArray(); + builder.endObject(); + return builder; + }) + ); } @Override diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamGlobalRetentionAction.java deleted file mode 100644 index 1ac12c918605f..0000000000000 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamGlobalRetentionAction.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.datastreams.lifecycle.rest; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.datastreams.lifecycle.action.DeleteDataStreamGlobalRetentionAction; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestChunkedToXContentListener; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.DELETE; - -/** - * Removes the data stream global retention configuration - */ -@ServerlessScope(Scope.PUBLIC) -public class RestDeleteDataStreamGlobalRetentionAction extends BaseRestHandler { - - @Override - public String getName() { - return "delete_data_stream_global_retention_action"; - } - - @Override - public List routes() { - return List.of(new Route(DELETE, "/_data_stream/_global_retention")); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - DeleteDataStreamGlobalRetentionAction.Request request = new DeleteDataStreamGlobalRetentionAction.Request(); - request.dryRun(restRequest.paramAsBoolean("dry_run", false)); - return channel -> client.execute( - DeleteDataStreamGlobalRetentionAction.INSTANCE, - request, - new RestChunkedToXContentListener<>(channel) - ); - } -} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamGlobalRetentionAction.java deleted file mode 100644 index cbe403af35f72..0000000000000 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamGlobalRetentionAction.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.datastreams.lifecycle.rest; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.datastreams.lifecycle.action.GetDataStreamGlobalRetentionAction; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestToXContentListener; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.GET; - -/** - * Retrieves the data stream global retention configuration. 
- */ -@ServerlessScope(Scope.PUBLIC) -public class RestGetDataStreamGlobalRetentionAction extends BaseRestHandler { - - @Override - public String getName() { - return "get_data_stream_global_retention_action"; - } - - @Override - public List routes() { - return List.of(new Route(GET, "/_data_stream/_global_retention")); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - GetDataStreamGlobalRetentionAction.Request request = new GetDataStreamGlobalRetentionAction.Request(); - request.local(restRequest.paramAsBoolean("local", request.local())); - request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); - - return channel -> client.execute(GetDataStreamGlobalRetentionAction.INSTANCE, request, new RestToXContentListener<>(channel)); - } -} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamGlobalRetentionAction.java deleted file mode 100644 index 5331c4df16db0..0000000000000 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamGlobalRetentionAction.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.datastreams.lifecycle.rest; - -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamGlobalRetentionAction; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.Scope; -import org.elasticsearch.rest.ServerlessScope; -import org.elasticsearch.rest.action.RestChunkedToXContentListener; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.PUT; - -/** - * Updates the default_retention and the max_retention of the data stream global retention configuration. It - * does not accept an empty payload. 
- */ -@ServerlessScope(Scope.PUBLIC) -public class RestPutDataStreamGlobalRetentionAction extends BaseRestHandler { - - @Override - public String getName() { - return "put_data_stream_global_retention_action"; - } - - @Override - public List routes() { - return List.of(new Route(PUT, "/_data_stream/_global_retention")); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - try (XContentParser parser = restRequest.contentParser()) { - PutDataStreamGlobalRetentionAction.Request request = PutDataStreamGlobalRetentionAction.Request.parseRequest(parser); - request.dryRun(restRequest.paramAsBoolean("dry_run", false)); - return channel -> client.execute( - PutDataStreamGlobalRetentionAction.INSTANCE, - request, - new RestChunkedToXContentListener<>(channel) - ); - } - } -} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java index 462c0626c6296..6b9184ea09e0b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java @@ -37,7 +37,6 @@ import static org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction.Response; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -196,23 +195,6 @@ public void testToXContent() throws IOException { Map lifecycleMap = (Map) explainIndexMap.get("lifecycle"); assertThat(lifecycleMap.get("data_retention"), nullValue()); - if (response.getGlobalRetention() == null) { - assertThat(lifecycleMap.get("effective_retention"), nullValue()); - assertThat(lifecycleMap.get("retention_determined_by"), nullValue()); - } else if (response.getGlobalRetention().getDefaultRetention() != null) { - assertThat( - lifecycleMap.get("effective_retention"), - equalTo(response.getGlobalRetention().getDefaultRetention().getStringRep()) - ); - assertThat(lifecycleMap.get("retention_determined_by"), equalTo("default_global_retention")); - } else { - assertThat( - lifecycleMap.get("effective_retention"), - equalTo(response.getGlobalRetention().getMaxRetention().getStringRep()) - ); - assertThat(lifecycleMap.get("retention_determined_by"), equalTo("max_global_retention")); - } - Map lifecycleRollover = (Map) lifecycleMap.get("rollover"); assertThat(lifecycleRollover.get("min_primary_shard_docs"), is(4)); assertThat(lifecycleRollover.get("max_primary_shard_docs"), is(9)); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml index c79775c51c392..d03174b448ff2 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/10_explain_lifecycle.yml @@ -1,9 +1,9 @@ --- "Explain backing index lifecycle": - - skip: - cluster_features: ["data_stream.lifecycle.global_retention"] - reason: "Data 
stream lifecycle with effective retention was released in 8.14" - features: allowed_warnings + - requires: + cluster_features: [ "gte_v8.11.0" ] + reason: "Data stream lifecycle was released as tech preview in 8.11" + test_runner_features: allowed_warnings - do: allowed_warnings: - "index template [template-with-lifecycle] has index patterns [managed-data-stream] matching patterns from existing older templates [global] with patterns (global => [*]); this template [template-with-lifecycle] will take precedence during new index creation" @@ -36,9 +36,7 @@ indices.explain_data_lifecycle: index: $backing_index - match: { indices.$backing_index.managed_by_lifecycle: true } - - match: { indices.$backing_index.lifecycle.data_retention: "30d" } - - match: { indices.$backing_index.lifecycle.effective_retention: "30d"} - - match: { indices.$backing_index.lifecycle.retention_determined_by: "data_stream_configuration"} + - match: { indices.$backing_index.lifecycle.data_retention: '30d' } - match: { indices.$backing_index.lifecycle.enabled: true } - is_false: indices.$backing_index.lifecycle.rollover @@ -48,9 +46,7 @@ index: $backing_index include_defaults: true - match: { indices.$backing_index.managed_by_lifecycle: true } - - match: { indices.$backing_index.lifecycle.data_retention: "30d" } - - match: { indices.$backing_index.lifecycle.effective_retention: "30d"} - - match: { indices.$backing_index.lifecycle.retention_determined_by: "data_stream_configuration"} + - match: { indices.$backing_index.lifecycle.data_retention: '30d' } - is_true: indices.$backing_index.lifecycle.rollover diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/190_create_data_stream_with_lifecycle.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/190_create_data_stream_with_lifecycle.yml index 745fd342d3a43..e13f245855f8c 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/190_create_data_stream_with_lifecycle.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/190_create_data_stream_with_lifecycle.yml @@ -1,8 +1,8 @@ --- "Create data stream with lifecycle": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "Data stream lifecycle with effective retention was released in 8.14" + cluster_features: [ "gte_v8.11.0" ] + reason: "Data stream lifecycle was GA in 8.11" test_runner_features: allowed_warnings - do: allowed_warnings: @@ -35,7 +35,5 @@ - match: { data_streams.0.template: 'template-with-lifecycle' } - match: { data_streams.0.hidden: false } - match: { data_streams.0.lifecycle.data_retention: '30d' } - - match: { data_streams.0.lifecycle.effective_retention: '30d'} - - match: { data_streams.0.lifecycle.retention_determined_by: 'data_stream_configuration'} - match: { data_streams.0.lifecycle.enabled: true } - is_true: data_streams.0.lifecycle.rollover diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml index ea34c6880d1f6..18aee1bf77232 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/20_basic.yml @@ -1,8 +1,8 @@ setup: - skip: features: allowed_warnings - cluster_features: 
["data_stream.lifecycle.global_retention"] - reason: "Data stream lifecycles with global retention are only supported in 8.14+" + cluster_features: ["gte_v8.11.0"] + reason: "Data stream lifecycles only supported in 8.11+" - do: allowed_warnings: - "index template [my-lifecycle] has index patterns [data-stream-with-lifecycle] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-lifecycle] will take precedence during new index creation" @@ -47,8 +47,6 @@ setup: - length: { data_streams: 1} - match: { data_streams.0.name: data-stream-with-lifecycle } - match: { data_streams.0.lifecycle.data_retention: '10d' } - - match: { data_streams.0.lifecycle.effective_retention: '10d' } - - match: { data_streams.0.lifecycle.retention_determined_by: 'data_stream_configuration' } - match: { data_streams.0.lifecycle.enabled: true} --- @@ -63,7 +61,6 @@ setup: - length: { data_streams: 1} - match: { data_streams.0.name: simple-data-stream1 } - match: { data_streams.0.lifecycle.enabled: true} - - is_false: data_streams.0.lifecycle.effective_retention --- "Put data stream lifecycle": @@ -95,7 +92,6 @@ setup: - length: { data_streams: 2 } - match: { data_streams.0.name: data-stream-with-lifecycle } - match: { data_streams.0.lifecycle.data_retention: "30d" } - - is_false: data_streams.0.lifecycle.effective_retention - match: { data_streams.0.lifecycle.enabled: false} - match: { data_streams.0.lifecycle.downsampling.0.after: '10d'} - match: { data_streams.0.lifecycle.downsampling.0.fixed_interval: '1h'} @@ -103,7 +99,6 @@ setup: - match: { data_streams.0.lifecycle.downsampling.1.fixed_interval: '10h'} - match: { data_streams.1.name: simple-data-stream1 } - match: { data_streams.1.lifecycle.data_retention: "30d" } - - is_false: data_streams.0.lifecycle.effective_retention - match: { data_streams.1.lifecycle.enabled: false} - match: { data_streams.1.lifecycle.downsampling.0.after: '10d'} - match: { data_streams.1.lifecycle.downsampling.0.fixed_interval: '1h'} @@ -129,8 +124,6 @@ setup: - match: { data_streams.0.lifecycle.enabled: true} - match: { data_streams.1.name: simple-data-stream1 } - match: { data_streams.1.lifecycle.data_retention: "30d" } - - match: { data_streams.1.lifecycle.effective_retention: "30d"} - - match: { data_streams.1.lifecycle.retention_determined_by: "data_stream_configuration"} - match: { data_streams.1.lifecycle.enabled: true} @@ -144,8 +137,6 @@ setup: - length: { data_streams: 1} - match: { data_streams.0.name: data-stream-with-lifecycle } - match: { data_streams.0.lifecycle.data_retention: "10d" } - - match: { data_streams.0.lifecycle.effective_retention: "10d"} - - match: { data_streams.0.lifecycle.retention_determined_by: "data_stream_configuration"} - is_true: data_streams.0.lifecycle.rollover --- @@ -163,8 +154,6 @@ setup: - length: { data_streams: 1 } - match: { data_streams.0.name: simple-data-stream1 } - match: { data_streams.0.lifecycle.data_retention: "30d" } - - match: { data_streams.0.lifecycle.effective_retention: "30d"} - - match: { data_streams.0.lifecycle.retention_determined_by: "data_stream_configuration"} - match: { data_streams.0.lifecycle.enabled: true } - do: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml index 303fbddd6c19c..24d0a5649a619 100644 --- 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/30_not_found.yml
@@ -23,18 +23,13 @@ setup:
 ---
 "Get data stream lifecycle":
-  - skip:
-      cluster_features: ["data_stream.lifecycle.global_retention"]
-      reason: "Data stream lifecycle with effective retention was released in 8.14"
   - do:
       indices.get_data_lifecycle:
         name: "*"
   - length: { data_streams: 1}
   - match: { data_streams.0.name: my-data-stream-1 }
-  - match: { data_streams.0.lifecycle.data_retention: "10d" }
-  - match: { data_streams.0.lifecycle.effective_retention: "10d"}
-  - match: { data_streams.0.lifecycle.retention_determined_by: "data_stream_configuration"}
+  - match: { data_streams.0.lifecycle.data_retention: '10d' }
   - match: { data_streams.0.lifecycle.enabled: true}
 ---
@@ -48,9 +43,7 @@ setup:
 ---
 "Put data stream lifecycle does not succeed when at least one data stream does not exist":
-  - skip:
-      cluster_features: ["data_stream.lifecycle.global_retention"]
-      reason: "Data stream lifecycle with effective retention was released in 8.14"
+
   - do:
       catch: missing
      indices.put_data_lifecycle:
@@ -64,16 +57,12 @@ setup:
         name: "*"
   - length: { data_streams: 1 }
   - match: { data_streams.0.name: my-data-stream-1 }
-  - match: { data_streams.0.lifecycle.data_retention: "10d" }
-  - match: { data_streams.0.lifecycle.effective_retention: "10d"}
-  - match: { data_streams.0.lifecycle.retention_determined_by: "data_stream_configuration"}
+  - match: { data_streams.0.lifecycle.data_retention: '10d' }
   - match: { data_streams.0.lifecycle.enabled: true }
 ---
 "Delete data stream lifecycle does not succeed when at least one data stream does not exist":
-  - skip:
-      cluster_features: ["data_stream.lifecycle.global_retention"]
-      reason: "Data stream lifecycle with effective retention was released in 8.14"
+
   - do:
       catch: missing
       indices.delete_data_lifecycle:
@@ -85,7 +74,5 @@ setup:
         name: "*"
   - length: { data_streams: 1 }
   - match: { data_streams.0.name: my-data-stream-1 }
-  - match: { data_streams.0.lifecycle.data_retention: "10d" }
-  - match: { data_streams.0.lifecycle.effective_retention: "10d"}
-  - match: { data_streams.0.lifecycle.retention_determined_by: "data_stream_configuration"}
+  - match: { data_streams.0.lifecycle.data_retention: '10d' }
   - match: { data_streams.0.lifecycle.enabled: true }
diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml
deleted file mode 100644
index 93df045e4568e..0000000000000
--- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/lifecycle/40_global_retention.yml
+++ /dev/null
@@ -1,139 +0,0 @@
-setup:
-  - skip:
-      features: allowed_warnings
-      cluster_features: ["data_stream.lifecycle.global_retention"]
-      reason: "Global retention was added in 8.14"
-  - do:
-      allowed_warnings:
-        - "index template [my-lifecycle] has index patterns [my-data-stream-1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-lifecycle] will take precedence during new index creation"
-      indices.put_index_template:
-        name: my-lifecycle
-        body:
-          index_patterns: [my-data-stream-*]
-          template:
-            settings:
-              index.number_of_replicas: 0
-            lifecycle: {}
-          data_stream: {}
-
----
-"CRUD global retention":
-  - do:
-      indices.create_data_stream:
- name: my-data-stream-1 - - do: - cluster.health: - index: my-data-stream-1 - wait_for_status: green - - do: - data_streams.put_global_retention: - body: - default_retention: "7d" - max_retention: "90d" - - is_true: acknowledged - - is_false: dry_run - - match: {affected_data_streams.0.name: "my-data-stream-1"} - - match: {affected_data_streams.0.previous_effective_retention: "infinite"} - - match: {affected_data_streams.0.new_effective_retention: "7d"} - - - do: - data_streams.get_global_retention: { } - - match: { default_retention: "7d" } - - match: { max_retention: "90d" } - - - do: - data_streams.delete_global_retention: { } - - is_true: acknowledged - - is_false: dry_run - - match: { affected_data_streams.0.name: "my-data-stream-1" } - - match: { affected_data_streams.0.previous_effective_retention: "7d" } - - match: { affected_data_streams.0.new_effective_retention: "infinite" } - - - do: - data_streams.get_global_retention: { } - - is_false: default_retention - - is_false: max_retention - - - do: - indices.delete_data_stream: - name: my-data-stream-1 ---- -"Dry run global retention": - - do: - indices.create_data_stream: - name: my-data-stream-2 - - do: - indices.put_data_lifecycle: - name: "my-data-stream-2" - body: > - { - "data_retention": "90d" - } - - is_true: acknowledged - - - do: - data_streams.put_global_retention: - dry_run: true - body: - default_retention: "7d" - max_retention: "30d" - - is_false: acknowledged - - is_true: dry_run - - match: {affected_data_streams.0.name: "my-data-stream-2"} - - match: {affected_data_streams.0.previous_effective_retention: "90d"} - - match: {affected_data_streams.0.new_effective_retention: "30d"} - - - do: - indices.get_data_stream: - name: "my-data-stream-2" - include_defaults: true - - match: { data_streams.0.name: my-data-stream-2 } - - match: { data_streams.0.lifecycle.effective_retention: '90d' } - - match: { data_streams.0.lifecycle.retention_determined_by: 'data_stream_configuration' } - - do: - indices.delete_data_stream: - name: my-data-stream-2 ---- -"Default global retention is retrieved by data stream and index templates": - - do: - indices.create_data_stream: - name: my-data-stream-3 - - - do: - data_streams.put_global_retention: - body: - default_retention: "7d" - max_retention: "90d" - - is_true: acknowledged - - is_false: dry_run - - match: {affected_data_streams.0.name: "my-data-stream-3"} - - match: {affected_data_streams.0.previous_effective_retention: "infinite"} - - match: {affected_data_streams.0.new_effective_retention: "7d"} - - - do: - data_streams.get_global_retention: { } - - match: { default_retention: "7d" } - - match: { max_retention: "90d" } - - - do: - indices.get_data_stream: - name: "my-data-stream-3" - - match: { data_streams.0.name: my-data-stream-3 } - - match: { data_streams.0.lifecycle.effective_retention: '7d' } - - match: { data_streams.0.lifecycle.retention_determined_by: 'default_global_retention' } - - match: { data_streams.0.lifecycle.enabled: true } - - - do: - indices.get_index_template: - name: my-lifecycle - - - match: { index_templates.0.name: my-lifecycle } - - match: { index_templates.0.index_template.template.lifecycle.enabled: true } - - match: { index_templates.0.index_template.template.lifecycle.effective_retention: "7d" } - - match: { index_templates.0.index_template.template.lifecycle.retention_determined_by: "default_global_retention" } - - - do: - data_streams.delete_global_retention: { } - - do: - indices.delete_data_stream: - name: my-data-stream-3 diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.delete_global_retention.json b/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.delete_global_retention.json deleted file mode 100644 index 1eb4621a7b055..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.delete_global_retention.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "data_streams.delete_global_retention":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-delete-global-retention.html", - "description":"Deletes the global retention configuration that applies to all data streams managed by the data stream lifecycle." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/json"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_data_stream/_global_retention", - "methods":[ - "DELETE" - ] - } - ] - }, - "params":{ - "dry_run":{ - "type":"boolean", - "description":"Determines whether the global retention provided should be applied or only the impact should be determined.", - "default":false - }, - "master_timeout":{ - "type":"time", - "description":"Specify timeout for connection to master." - } - } - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.get_global_retention.json b/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.get_global_retention.json deleted file mode 100644 index 9084db36d7d90..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.get_global_retention.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "data_streams.get_global_retention":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-get-global-retention.html", - "description":"Returns global retention configuration that applies to all data streams managed by the data stream lifecycle." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_data_stream/_global_retention", - "methods":[ - "GET" - ] - } - ] - }, - "params":{ - "local":{ - "type":"boolean", - "description":"Return the global retention retrieved from the node that received the request." - } - } - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.put_global_retention.json b/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.put_global_retention.json deleted file mode 100644 index 9f369f4c7616d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/data_streams.put_global_retention.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "data_streams.put_global_retention":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-put-global-retention.html", - "description":"Updates the global retention configuration that applies to all data streams managed by the data stream lifecycle." 
- }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/json"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_data_stream/_global_retention", - "methods":[ - "PUT" - ] - } - ] - }, - "params":{ - "dry_run":{ - "type":"boolean", - "description":"Determines whether the global retention provided should be applied or only the impact should be determined.", - "default":false - }, - "master_timeout":{ - "type":"time", - "description":"Specify timeout for connection to master" - } - }, - "body":{ - "description":"The global retention configuration including optional values for default and max retention.", - "required":true - } - } -} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index af2d6f946d2ff..f698d3399f27d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -117,8 +117,8 @@ --- "Add data stream lifecycle": - requires: - cluster_features: ["data_stream.lifecycle.global_retention"] - reason: "Data stream lifecycle with global retention was available from 8.14" + cluster_features: ["gte_v8.11.0"] + reason: "Data stream lifecycle was available from 8.11" - do: cluster.put_component_template: @@ -141,14 +141,12 @@ - match: {component_templates.0.component_template.version: 1} - match: {component_templates.0.component_template.template.lifecycle.enabled: true} - match: {component_templates.0.component_template.template.lifecycle.data_retention: "10d"} - - match: {component_templates.0.component_template.template.lifecycle.effective_retention: "10d"} - - match: {component_templates.0.component_template.template.lifecycle.retention_determined_by: "data_stream_configuration"} --- "Get data stream lifecycle with default rollover": - requires: - cluster_features: ["data_stream.lifecycle.global_retention"] - reason: "Data stream lifecycle with effective retention was available from 8.14" + cluster_features: ["gte_v8.11.0"] + reason: "Data stream lifecycle was available from 8.11" - do: cluster.put_component_template: @@ -172,6 +170,4 @@ - match: {component_templates.0.component_template.version: 1} - match: {component_templates.0.component_template.template.lifecycle.enabled: true} - match: {component_templates.0.component_template.template.lifecycle.data_retention: "10d"} - - match: {component_templates.0.component_template.template.lifecycle.effective_retention: "10d"} - - match: {component_templates.0.component_template.template.lifecycle.retention_determined_by: "data_stream_configuration"} - is_true: component_templates.0.component_template.template.lifecycle.rollover diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml index dc3361fefab6e..2079c01079ce1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_index_template/10_basic.yml @@ -93,8 +93,8 @@ setup: --- "Add data stream lifecycle": - skip: - cluster_features: ["data_stream.lifecycle.global_retention"] - reason: "Data stream 
lifecycle with effective retention was released in 8.14" + cluster_features: ["gte_v8.11.0"] + reason: "Data stream lifecycle in index templates was updated after 8.10" features: allowed_warnings - do: @@ -124,14 +124,12 @@ setup: - match: {index_templates.0.index_template.template.mappings: {properties: {field: {type: keyword}}}} - match: {index_templates.0.index_template.template.lifecycle.enabled: true} - match: {index_templates.0.index_template.template.lifecycle.data_retention: "30d"} - - match: {index_templates.0.index_template.template.lifecycle.effective_retention: "30d"} - - match: {index_templates.0.index_template.template.lifecycle.retention_determined_by: "data_stream_configuration"} --- "Get data stream lifecycle with default rollover": - skip: - cluster_features: ["data_stream.lifecycle.global_retention"] - reason: "Data stream lifecycle with effective retention was released in 8.14" + cluster_features: ["gte_v8.11.0"] + reason: "Data stream lifecycle in index templates was updated after 8.10" features: allowed_warnings - do: @@ -154,13 +152,11 @@ setup: - match: {index_templates.0.index_template.index_patterns: ["data-stream-with-lifecycle-*"]} - match: {index_templates.0.index_template.template.lifecycle.enabled: true} - match: {index_templates.0.index_template.template.lifecycle.data_retention: "30d"} - - match: {index_templates.0.index_template.template.lifecycle.effective_retention: "30d"} - - match: {index_templates.0.index_template.template.lifecycle.retention_determined_by: "data_stream_configuration"} - is_true: index_templates.0.index_template.template.lifecycle.rollover --- "Reject data stream lifecycle without data stream configuration": - - skip: + - requires: cluster_features: ["gte_v8.11.0"] reason: "Data stream lifecycle in index templates was updated after 8.10" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml index 6790014be9951..81c8cf64169e2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml @@ -227,8 +227,8 @@ --- "Simulate index template with lifecycle and include defaults": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "Data stream lifecycle with effective retention was released in 8.14" + cluster_features: ["gte_v8.11.0"] + reason: "Lifecycle is only available in 8.11+" test_runner_features: ["default_shards"] - do: @@ -248,7 +248,5 @@ - match: {template.lifecycle.enabled: true} - match: {template.lifecycle.data_retention: "7d"} - - match: {template.lifecycle.effective_retention: "7d"} - - match: {template.lifecycle.retention_determined_by: "data_stream_configuration"} - is_true: template.lifecycle.rollover - match: {overlapping: []} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml index ff53a762b75ef..236653b7ca9ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml @@ -1,9 +1,9 @@ --- "Simulate template without a template in the body": - - requires: - 
cluster_features: ["gte_v7.9.0"] + - skip: + version: " - 7.8.99" reason: "only available in 7.9+" - test_runner_features: ["default_shards"] + features: ["default_shards"] - do: indices.put_index_template: @@ -30,10 +30,10 @@ --- "Simulate index template specifying a new template": - - requires: - cluster_features: ["gte_v7.9.0"] + - skip: + version: " - 7.8.99" reason: "only available in 7.9+" - test_runner_features: ["default_shards"] + features: ["default_shards"] - do: indices.put_index_template: @@ -84,10 +84,10 @@ --- "Simulate template matches overlapping legacy and composable templates": - - requires: - cluster_features: ["gte_v7.9.0"] + - skip: + version: " - 7.8.99" reason: "only available in 7.9+" - test_runner_features: ["allowed_warnings", "default_shards"] + features: ["allowed_warnings", "default_shards"] - do: indices.put_template: @@ -147,10 +147,10 @@ --- "Simulate replacing a template with a newer version": - - requires: - cluster_features: ["gte_v8.0.0"] + - skip: + version: " - 7.99.99" reason: "not yet backported" - test_runner_features: ["allowed_warnings", "default_shards"] + features: ["allowed_warnings", "default_shards"] - do: indices.put_index_template: @@ -202,8 +202,8 @@ --- "Simulate template with lifecycle and include defaults": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "Data stream lifecycle with effective retention was released in 8.14" + cluster_features: [ "gte_v8.11.0" ] + reason: "Lifecycle is only available in 8.11+" test_runner_features: ["default_shards"] - do: @@ -223,6 +223,4 @@ - match: {template.lifecycle.enabled: true} - match: {template.lifecycle.data_retention: "7d"} - - match: {template.lifecycle.effective_retention: "7d"} - - match: {template.lifecycle.retention_determined_by: "data_stream_configuration"} - is_true: template.lifecycle.rollover diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index d352f1be5e65a..3bf9c3715b29a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -191,14 +190,13 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(COMPONENT_TEMPLATES.getPreferredName()); for (Map.Entry componentTemplate : this.componentTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), componentTemplate.getKey()); builder.field(COMPONENT_TEMPLATE.getPreferredName()); - componentTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + componentTemplate.getValue().toXContent(builder, params, rolloverConfiguration, 
globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index 668e3f8e7c10f..240fdd2ae8199 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -190,14 +189,13 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(INDEX_TEMPLATES.getPreferredName()); for (Map.Entry indexTemplate : this.indexTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), indexTemplate.getKey()); builder.field(INDEX_TEMPLATE.getPreferredName()); - indexTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + indexTemplate.getValue().toXContent(builder, params, rolloverConfiguration, globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 378df2d7d53e7..4ff38222ccc99 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -114,11 +113,10 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); if (this.resolvedTemplate != null) { builder.field(TEMPLATE.getPreferredName()); - this.resolvedTemplate.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + this.resolvedTemplate.toXContent(builder, params, rolloverConfiguration, globalRetention); } if (this.overlappingTemplates != null) { builder.startArray(OVERLAPPING.getPreferredName()); diff --git 
a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 1a2103d665b38..f2a581472303b 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAutoShardingEvent; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -544,11 +543,10 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); for (DataStreamInfo dataStream : dataStreams) { - dataStream.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + dataStream.toXContent(builder, params, rolloverConfiguration, globalRetention); } builder.endArray(); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java index 2b79377fb71e0..bb6c3f90f1b0a 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java @@ -132,11 +132,7 @@ public XContentBuilder toXContent( } if (this.lifecycle != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - Params withEffectiveRetentionParams = new DelegatingMapParams( - DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, - params - ); - lifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + lifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); } if (this.error != null) { if (error.firstOccurrenceTimestamp() != -1L && error.recordedTimestamp() != -1L && error.retryCount() != -1) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java index 025f51b7df997..d31c9fddf2712 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java @@ -34,7 +34,6 @@ import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomSettings; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.not; public class GetComponentTemplateResponseTests extends AbstractWireSerializingTestCase { @Override @@ -103,9 +102,6 @@ public void 
testXContentSerializationWithRolloverAndEffectiveRetention() throws .keySet()) { assertThat(serialized, containsString(label)); } - // We check that even if there was no retention provided by the user, the global retention applies - assertThat(serialized, not(containsString("data_retention"))); - assertThat(serialized, containsString("effective_retention")); } } From a8096411792ed06f0417ff129edffbb3c4c761ea Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Wed, 17 Apr 2024 20:40:53 +0200 Subject: [PATCH 086/130] Fix typo in text_expansion query docs example (#107572) * Fix typo in docs example * fix indentation --- .../query-dsl/text-expansion-query.asciidoc | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/reference/query-dsl/text-expansion-query.asciidoc b/docs/reference/query-dsl/text-expansion-query.asciidoc index 27fca2bb56375..5b3f98b5e1ea8 100644 --- a/docs/reference/query-dsl/text-expansion-query.asciidoc +++ b/docs/reference/query-dsl/text-expansion-query.asciidoc @@ -232,12 +232,12 @@ GET my-index/_search "text_expansion":{ "ml.tokens":{ "model_id":".elser_model_2", - "model_text":"How is the weather in Jamaica?" - }, - "pruning_config": { - "tokens_freq_ratio_threshold": 5, - "tokens_weight_threshold": 0.4, - "only_score_pruned_tokens": false + "model_text":"How is the weather in Jamaica?", + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": false + } } } }, @@ -248,12 +248,12 @@ GET my-index/_search "text_expansion": { "ml.tokens": { "model_id": ".elser_model_2", - "model_text": "How is the weather in Jamaica?" - }, - "pruning_config": { - "tokens_freq_ratio_threshold": 5, - "tokens_weight_threshold": 0.4, - "only_score_pruned_tokens": true + "model_text": "How is the weather in Jamaica?", + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": true + } } } } From faf696557e93eccb55f62c007eabbb2fc24e6097 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 17 Apr 2024 14:52:41 -0400 Subject: [PATCH 087/130] Add 8.14 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 772693505b9e0..daf6d249f7268 100644 --- a/branches.json +++ b/branches.json @@ -4,6 +4,9 @@ { "branch": "main" }, + { + "branch": "8.14" + }, { "branch": "8.13" }, From 91d3bb026fa8272bb847e84f85cc18f6f93ebc2a Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Wed, 17 Apr 2024 14:58:20 -0400 Subject: [PATCH 088/130] [ci] Add checkPart4 to missing locations (#107552) --- .buildkite/pipelines/lucene-snapshot/run-tests.yml | 8 ++++++++ .buildkite/pipelines/periodic-platform-support.yml | 2 ++ .buildkite/pipelines/periodic.template.yml | 2 ++ .buildkite/pipelines/periodic.yml | 2 ++ 4 files changed, 14 insertions(+) diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index 15d78f8495ca8..a5d3c4e5f7935 100644 --- a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -32,6 +32,14 @@ steps: image: family/elasticsearch-ubuntu-2004 machineType: custom-32-98304 buildDirectory: /dev/shm/bk + - label: part4 + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4 + 
timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: custom-32-98304 + buildDirectory: /dev/shm/bk - group: bwc-snapshots steps: - label: "{{matrix.BWC_VERSION}} / bwc-snapshots" diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 0240fd03f4a89..a3922d8226924 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -47,6 +47,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: gcp @@ -70,6 +71,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: aws diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 05d516992a7f6..7315dc9de260f 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -49,6 +49,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: gcp @@ -89,6 +90,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 9291ec2efcbd9..64ea30266bf33 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -390,6 +390,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: gcp @@ -430,6 +431,7 @@ steps: - checkPart1 - checkPart2 - checkPart3 + - checkPart4 - checkRestCompat agents: provider: gcp From d9b8245a10a22db941eddf239ee58199e295307a Mon Sep 17 00:00:00 2001 From: Sander Philipse <94373878+sphilipse@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:43:46 +0200 Subject: [PATCH 089/130] [Search] Add default ingest pipeline (#107558) * [Search] Add default ingest pipeline * Spotless apply --- .../entsearch/search_default_pipeline.json | 130 ++++++++++++++++++ .../connector/ConnectorTemplateRegistry.java | 9 ++ 2 files changed, 139 insertions(+) create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/entsearch/search_default_pipeline.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/search_default_pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/search_default_pipeline.json new file mode 100644 index 0000000000000..bd556900a42e1 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/search_default_pipeline.json @@ -0,0 +1,130 @@ +{ + "version": ${xpack.application.connector.template.version}, + "description": "Default search ingest pipeline", + "_meta": { + "managed_by": "Search", + "managed": true + }, + "processors": [ + { + "attachment": { + "description": "Extract text from binary attachments", + "field": "_attachment", + "target_field": "_extracted_attachment", + "ignore_missing": true, + "indexed_chars_field": "_attachment_indexed_chars", + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'attachment' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ], + "remove_binary": false + } + }, + { + "set": { + "tag": "set_body", + "description": "Set any extracted text on the 'body' field", + "field": "body", + "copy_from": 
"_extracted_attachment.content", + "ignore_empty_value": true, + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'set' with tag 'set_body' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "gsub": { + "tag": "remove_replacement_chars", + "description": "Remove unicode 'replacement' characters", + "field": "body", + "pattern": "�", + "replacement": "", + "ignore_missing": true, + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'gsub' with tag 'remove_replacement_chars' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "gsub": { + "tag": "remove_extra_whitespace", + "description": "Squish whitespace", + "field": "body", + "pattern": "\\s+", + "replacement": " ", + "ignore_missing": true, + "if": "ctx?._reduce_whitespace == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'gsub' with tag 'remove_extra_whitespace' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "trim": { + "description": "Trim leading and trailing whitespace", + "field": "body", + "ignore_missing": true, + "if": "ctx?._reduce_whitespace == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'trim' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "remove": { + "tag": "remove_meta_fields", + "description": "Remove meta fields", + "field": [ + "_attachment", + "_attachment_indexed_chars", + "_extracted_attachment", + "_extract_binary_content", + "_reduce_whitespace", + "_run_ml_inference" + ], + "ignore_missing": true, + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'remove' with tag 'remove_meta_fields' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + } + ] +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index c57650541b416..e4ce4d8181fd8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -54,6 +54,9 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { public static final String ENT_SEARCH_GENERIC_PIPELINE_NAME = "ent-search-generic-ingestion"; public static final String ENT_SEARCH_GENERIC_PIPELINE_FILE = "generic_ingestion_pipeline"; + public static final String SEARCH_DEFAULT_PIPELINE_NAME = "search-default-ingestion"; + public static final String SEARCH_DEFAULT_PIPELINE_FILE = "search_default_pipeline"; + // Resource config public static final String ROOT_RESOURCE_PATH = "/entsearch/"; 
public static final String ROOT_TEMPLATE_RESOURCE_PATH = ROOT_RESOURCE_PATH + "connector/";
@@ -115,6 +118,12 @@ protected List getIngestPipelines() {
                 ROOT_RESOURCE_PATH + ENT_SEARCH_GENERIC_PIPELINE_FILE + ".json",
                 REGISTRY_VERSION,
                 TEMPLATE_VERSION_VARIABLE
+            ),
+            new JsonIngestPipelineConfig(
+                SEARCH_DEFAULT_PIPELINE_NAME,
+                ROOT_RESOURCE_PATH + SEARCH_DEFAULT_PIPELINE_FILE + ".json",
+                REGISTRY_VERSION,
+                TEMPLATE_VERSION_VARIABLE
             )
         );
     }

From 61a2c52f18b4e38d8be695a677a166e0cac5212d Mon Sep 17 00:00:00 2001
From: David Turner
Date: Thu, 18 Apr 2024 12:24:17 +0100
Subject: [PATCH 090/130] Fix `CONCURRENT_REPOSITORY_WRITERS` link (#107603)

This page was split up in #104614 but the `ReferenceDocs` symbol still links
to the top-level page rather than the correct subpage. This fixes the link.
---
 .../org/elasticsearch/common/reference-docs-links.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
index ead7387b0e1ac..503f02b25eb8d 100644
--- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
+++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json
@@ -4,7 +4,7 @@
   "UNSTABLE_CLUSTER_TROUBLESHOOTING": "troubleshooting-unstable-cluster.html",
   "LAGGING_NODE_TROUBLESHOOTING": "troubleshooting-unstable-cluster.html#_diagnosing_lagging_nodes_2",
   "SHARD_LOCK_TROUBLESHOOTING": "troubleshooting-unstable-cluster.html#_diagnosing_shardlockobtainfailedexception_failures_2",
-  "CONCURRENT_REPOSITORY_WRITERS": "add-repository.html",
+  "CONCURRENT_REPOSITORY_WRITERS": "diagnosing-corrupted-repositories.html",
   "ARCHIVE_INDICES": "archive-indices.html",
   "HTTP_TRACER": "modules-network.html#http-rest-request-tracer",
   "LOGGING": "logging.html",

From 9adf2422dfeec594eb6ac9cff81152084d01c2dc Mon Sep 17 00:00:00 2001
From: David Turner
Date: Thu, 18 Apr 2024 12:24:45 +0100
Subject: [PATCH 091/130] Add links to repo troubleshooting sub-pages (#107604)

Since #104614 the top-level repo troubleshooting page is just a short
paragraph which talks about "this page" but in fact refers to information
spread across a number of subsequent pages. It's not obvious to the reader
that they need to use the navigation menu to get to the information they
seek. Moreover, we link to this page from an exception message today, so
there's a reasonable chance that users will find it when trying to
troubleshoot a genuine problem.

This commit rewords things slightly and adds links to the subsequent pages
to the body of the page to avoid this confusion.
---
 .../troubleshooting/snapshot/add-repository.asciidoc | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
index 386c2561c03c6..e70bd244df3a5 100644
--- a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
+++ b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc
@@ -2,8 +2,12 @@
 == Troubleshooting broken repositories

 There are several situations where the <<health-api>> might report an issue
-regarding the integrity of snapshot repositories in the cluster. This page explains
-the recommended actions for diagnosing corrupted, unknown, and invalid repositories.
+regarding the integrity of snapshot repositories in the cluster. The following pages explain
+the recommended actions for diagnosing corrupted, unknown, and invalid repositories:
+
+- <<diagnosing-corrupted-repositories>>
+- <<diagnosing-unknown-repositories>>
+- <<diagnosing-invalid-repositories>>

 [[diagnosing-corrupted-repositories]]
 === Diagnosing corrupted repositories

From 9a8e1bf871f4144edc0e11eb98a8cf93700a5d9c Mon Sep 17 00:00:00 2001
From: Mary Gouseti
Date: Thu, 18 Apr 2024 15:23:16 +0300
Subject: [PATCH 092/130] Move data stream lifecycle related actions to server (#107607)

---
 .../datastreams/lifecycle/CrudDataStreamLifecycleIT.java | 4 ++--
 .../lifecycle/DataStreamLifecycleServiceIT.java | 4 ++--
 .../lifecycle/ExplainDataStreamLifecycleIT.java | 2 +-
 .../org/elasticsearch/datastreams/DataStreamsPlugin.java | 6 +++---
 .../action/TransportExplainDataStreamLifecycleAction.java | 1 +
 .../action/TransportGetDataStreamLifecycleAction.java | 1 +
 .../action/TransportPutDataStreamLifecycleAction.java | 1 +
 .../rest/RestDeleteDataStreamLifecycleAction.java | 2 +-
 .../rest/RestExplainDataStreamLifecycleAction.java | 2 +-
 .../lifecycle/rest/RestGetDataStreamLifecycleAction.java | 2 +-
 .../lifecycle/rest/RestPutDataStreamLifecycleAction.java | 2 +-
 .../lifecycle}/ExplainDataStreamLifecycleAction.java | 7 ++-----
 .../lifecycle}/GetDataStreamLifecycleAction.java | 8 +++-----
 .../lifecycle}/PutDataStreamLifecycleAction.java | 4 ++--
 .../ExplainDataStreamLifecycleResponseTests.java | 6 ++----
 .../xpack/downsample/DataStreamLifecycleDownsampleIT.java | 2 +-
 .../xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java | 4 ++--
 17 files changed, 27 insertions(+), 31 deletions(-)
 rename {modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action => server/src/main/java/org/elasticsearch/action/datastreams/lifecycle}/ExplainDataStreamLifecycleAction.java (96%)
 rename {modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action => server/src/main/java/org/elasticsearch/action/datastreams/lifecycle}/GetDataStreamLifecycleAction.java (97%)
 rename {modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action => server/src/main/java/org/elasticsearch/action/datastreams/lifecycle}/PutDataStreamLifecycleAction.java (97%)
 rename {modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action => server/src/test/java/org/elasticsearch/action/datastreams/lifecycle}/ExplainDataStreamLifecycleResponseTests.java (98%)

diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java
index b772e0bb347e2..d43dad87a6067 100644
--- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java
+++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/CrudDataStreamLifecycleIT.java
@@ -9,12 +9,12 @@
 package org.elasticsearch.datastreams.lifecycle;

 import org.elasticsearch.action.datastreams.CreateDataStreamAction;
+import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction;
+import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction;
 import org.elasticsearch.cluster.metadata.DataStreamLifecycle;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.datastreams.DataStreamsPlugin;
 import org.elasticsearch.datastreams.lifecycle.action.DeleteDataStreamLifecycleAction;
-import
org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 9880e5e9914a8..5ebdbd272f3fe 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -27,7 +27,9 @@ import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; @@ -45,8 +47,6 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.DataStreamsPlugin; -import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthIndicatorService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.GetHealthAction; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java index a497eed121b0c..7120196176928 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/ExplainDataStreamLifecycleIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -28,7 +29,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.datastreams.DataStreamsPlugin; -import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.plugins.Plugin; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index c18da970ca26b..a1e65d7784a39 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -18,6 +18,9 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; +import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; @@ -44,12 +47,9 @@ import org.elasticsearch.datastreams.lifecycle.UpdateDataStreamGlobalRetentionService; import org.elasticsearch.datastreams.lifecycle.action.DeleteDataStreamGlobalRetentionAction; import org.elasticsearch.datastreams.lifecycle.action.DeleteDataStreamLifecycleAction; -import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.GetDataStreamGlobalRetentionAction; -import org.elasticsearch.datastreams.lifecycle.action.GetDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.GetDataStreamLifecycleStatsAction; import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamGlobalRetentionAction; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.TransportDeleteDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.TransportExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.TransportGetDataStreamLifecycleAction; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java index a5c3b092a8913..e88c023e8996d 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java index 3a3a54d747920..881f472b19d3c 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java index 7a33d3011c621..11ecf85b1ac26 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamLifecycleAction.java index 22d99b67b3ff1..b624892ac6bba 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestDeleteDataStreamLifecycleAction.java @@ -21,7 +21,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; -@ServerlessScope(Scope.PUBLIC) +@ServerlessScope(Scope.INTERNAL) public class RestDeleteDataStreamLifecycleAction extends BaseRestHandler { @Override diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestExplainDataStreamLifecycleAction.java index 09f4b6efce633..d3115d6d3d3a3 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestExplainDataStreamLifecycleAction.java @@ -8,10 +8,10 @@ package org.elasticsearch.datastreams.lifecycle.rest; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamLifecycleAction.java 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamLifecycleAction.java index f2c514c794b32..3d802d483fd8c 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestGetDataStreamLifecycleAction.java @@ -7,10 +7,10 @@ */ package org.elasticsearch.datastreams.lifecycle.rest; +import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.datastreams.lifecycle.action.GetDataStreamLifecycleAction; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java index d97990b46c0ba..f763c0d75ed47 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java @@ -7,10 +7,10 @@ */ package org.elasticsearch.datastreams.lifecycle.rest; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleAction.java similarity index 96% rename from modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java rename to server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleAction.java index 7af70f98e8ce7..ee4f7fbaa9c59 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleAction.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.datastreams.lifecycle.action; +package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; -import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; @@ -37,9 +36,7 @@ */ public class ExplainDataStreamLifecycleAction { - public static final ActionType<Response> INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/explain" - ); + public static final ActionType<Response> INSTANCE = new ActionType<>("indices:admin/data_stream/lifecycle/explain"); private ExplainDataStreamLifecycleAction() {/* no instances */} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java similarity index 97% rename from modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java rename to server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java index 0a5bda11f2a22..d0dd67b4b4db5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/GetDataStreamLifecycleAction.java @@ -5,7 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -package org.elasticsearch.datastreams.lifecycle.action; +package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; @@ -38,9 +38,7 @@ */ public class GetDataStreamLifecycleAction { - public static final ActionType<Response> INSTANCE = new ActionType<>( - "indices:admin/data_stream/lifecycle/get" - ); + public static final ActionType<Response> INSTANCE = new ActionType<>("indices:admin/data_stream/lifecycle/get"); private GetDataStreamLifecycleAction() {/* no instances */} @@ -205,7 +203,7 @@ public Response( public Response(StreamInput in) throws IOException { this( - in.readCollectionAsList(Response.DataStreamLifecycle::new), + in.readCollectionAsList(DataStreamLifecycle::new), in.readOptionalWriteable(RolloverConfiguration::new), in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java similarity index 97% rename from modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java rename to server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index c40988f1de6c7..8156e03b0cdd1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -package org.elasticsearch.datastreams.lifecycle.action; +package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; -import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -32,6 +31,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_RETENTION_FIELD; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DOWNSAMPLING_FIELD; +import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.ENABLED_FIELD; /** diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleResponseTests.java similarity index 98% rename from modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java rename to server/src/test/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleResponseTests.java index 6b9184ea09e0b..a47eca7692842 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/lifecycle/ExplainDataStreamLifecycleResponseTests.java @@ -6,14 +6,12 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.datastreams.lifecycle.action; +package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.action.admin.indices.rollover.MaxPrimaryShardDocsCondition; import org.elasticsearch.action.admin.indices.rollover.MinPrimaryShardDocsCondition; import org.elasticsearch.action.admin.indices.rollover.RolloverConditions; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; -import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; -import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; @@ -35,7 +33,7 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction.Response; +import static org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction.Response; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleIT.java index 57024acee809f..d8dd030903e0e 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleIT.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling; @@ -16,7 +17,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESIntegTestCase; diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java index b9c58f728d1e3..7278b0e6c7f49 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataStreamAndIndexLifecycleMixingTests.java @@ -16,7 +16,9 @@ import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; +import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; 
import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; +import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; @@ -28,8 +30,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; -import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; -import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; From 1a346cbb0d676cecd7e551199369aef1142c87f5 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Thu, 18 Apr 2024 14:29:22 +0200 Subject: [PATCH 093/130] Prioritize movement of write shards (#107605) This change enhances shard ordering in OrderedShardsIterator. Previously, shards were ordered by node allocation recency only, and the ordering within a node was not explicitly defined. With this change, data stream write index shards are prioritized within each node. When shard movement is necessary, such shards are moved first, which can be useful when a node shutdown is subject to a timeout. During rebalancing, such shards are instead moved last, to minimize disruption to the ongoing indexing (a simplified sketch of this ordering follows the test changes below). --- .../allocator/DesiredBalanceReconciler.java | 4 +- .../allocator/OrderedShardsIterator.java | 47 ++++++++- .../allocator/OrderedShardsIteratorTests.java | 97 +++++++++++++++++-- 3 files changed, 133 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 95b0d23b564a2..85de123de3145 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -441,7 +441,7 @@ private boolean isIgnored(RoutingNodes routingNodes, ShardRouting shard, ShardAs private void moveShards() { // Iterate over all started shards and check if they can remain. In the presence of throttling shard movements, // the goal of this iteration order is to achieve a fairer movement of shards from the nodes that are offloading the shards. - for (final var iterator = OrderedShardsIterator.create(routingNodes, moveOrdering); iterator.hasNext();) { + for (final var iterator = OrderedShardsIterator.createForNecessaryMoves(allocation, moveOrdering); iterator.hasNext();) { final var shardRouting = iterator.next(); if (shardRouting.started() == false) { @@ -500,7 +500,7 @@ private void balance() { // Iterate over all started shards and try to move any which are on undesired nodes. In the presence of throttling shard // movements, the goal of this iteration order is to achieve a fairer movement of shards from the nodes that are offloading the // shards. 
- for (final var iterator = OrderedShardsIterator.create(routingNodes, moveOrdering); iterator.hasNext();) { + for (final var iterator = OrderedShardsIterator.createForBalancing(allocation, moveOrdering); iterator.hasNext();) { final var shardRouting = iterator.next(); totalAllocations++; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java index efe59d98ba5df..d697686f9258b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIterator.java @@ -8,34 +8,71 @@ package org.elasticsearch.cluster.routing.allocation.allocator; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.collect.Iterators; import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.Comparator; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; /** - * This class iterates all shards from all nodes in order of allocation recency. - * Shards from the node that had a new shard allocation would appear in the end of iteration. + * This class iterates all shards from all nodes. + * The shard order is defined by + * (1) allocation recency: shards from the node that had a new shard allocation would appear in the end of iteration. + * (2) shard priority: for necessary moves data stream write shards, then regular index shards, then the rest + * for rebalancing the order is inverse */ public class OrderedShardsIterator implements Iterator<ShardRouting> { private final ArrayDeque<NodeAndShardIterator> queue; - public static OrderedShardsIterator create(RoutingNodes routingNodes, NodeAllocationOrdering ordering) { + public static OrderedShardsIterator createForNecessaryMoves(RoutingAllocation allocation, NodeAllocationOrdering ordering) { + return create(allocation.routingNodes(), createShardsComparator(allocation.metadata()), ordering); + } + + public static OrderedShardsIterator createForBalancing(RoutingAllocation allocation, NodeAllocationOrdering ordering) { + return create(allocation.routingNodes(), createShardsComparator(allocation.metadata()).reversed(), ordering); + } + + private static OrderedShardsIterator create( + RoutingNodes routingNodes, + Comparator<ShardRouting> shardOrder, + NodeAllocationOrdering nodeOrder + ) { var queue = new ArrayDeque<NodeAndShardIterator>(routingNodes.size()); - for (var nodeId : ordering.sort(routingNodes.getAllNodeIds())) { + for (var nodeId : nodeOrder.sort(routingNodes.getAllNodeIds())) { var node = routingNodes.node(nodeId); if (node.size() > 0) { - queue.add(new NodeAndShardIterator(nodeId, Iterators.forArray(node.copyShards()))); + queue.add(new NodeAndShardIterator(nodeId, sort(shardOrder, node.copyShards()))); } } return new OrderedShardsIterator(queue); } + private static Iterator<ShardRouting> sort(Comparator<ShardRouting> comparator, ShardRouting[] shards) { Arrays.sort(shards, comparator); return Iterators.forArray(shards); } + private static Comparator<ShardRouting> createShardsComparator(Metadata metadata) { + return Comparator.comparing(shard -> { + var lookup = metadata.getIndicesLookup().get(shard.getIndexName()); + if (lookup != null && lookup.getParentDataStream() != null) { + // prioritize 
write indices of the data stream + return Objects.equals(lookup.getParentDataStream().getWriteIndex(), shard.index()) ? 0 : 2; + } else { + // regular index + return 1; + } + }); + } + private OrderedShardsIterator(ArrayDeque queue) { this.queue = queue; } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java index f9ef743dc7fad..bc0590625d290 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java @@ -8,14 +8,23 @@ package org.elasticsearch.cluster.routing.allocation.allocator; +import org.elasticsearch.cluster.ClusterInfo; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; @@ -28,6 +37,7 @@ import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -49,7 +59,10 @@ public void testOrdersShardsAccordingToAllocationRecency() { var ordering = new NodeAllocationOrdering(); ordering.recordAllocation("node-1"); - var iterator = createOrderedShardsIterator(nodes, routing, ordering); + var iterator = OrderedShardsIterator.createForNecessaryMoves( + createRoutingAllocation(nodes, Metadata.EMPTY_METADATA, routing), + ordering + ); // order within same priority is not defined // no recorded allocations first @@ -81,7 +94,10 @@ public void testReOrdersShardDuringIteration() { ordering.recordAllocation("node-3"); ordering.recordAllocation("node-2"); - var iterator = createOrderedShardsIterator(nodes, routing, ordering); + var iterator = OrderedShardsIterator.createForNecessaryMoves( + createRoutingAllocation(nodes, Metadata.EMPTY_METADATA, routing), + ordering + ); var first = iterator.next(); assertThat(first, anyOf(isIndexShardAt("index-1a", "node-1"), isIndexShardAt("index-1b", "node-1"))); @@ -93,13 +109,76 @@ public void testReOrdersShardDuringIteration() { assertThat(iterator.hasNext(), equalTo(false)); } - private OrderedShardsIterator createOrderedShardsIterator(DiscoveryNodes nodes, RoutingTable routing, NodeAllocationOrdering ordering) { - var routingNodes = randomBoolean() ? 
RoutingNodes.mutable(routing, nodes) : RoutingNodes.immutable(routing, nodes); - return OrderedShardsIterator.create(routingNodes, ordering); + public void testShouldOrderShardByPriority() { + + var nodes = DiscoveryNodes.builder().add(newNode("node-1")).add(newNode("node-2")).build(); + + IndexMetadata lookup = IndexMetadata.builder("lookup").settings(indexSettings(IndexVersion.current(), 1, 0)).build(); + IndexMetadata ds1 = IndexMetadata.builder(".ds-data-stream-2024.04.18-000001") + .settings(indexSettings(IndexVersion.current(), 1, 0)) + .build(); + IndexMetadata ds2 = IndexMetadata.builder(".ds-data-stream-2024.04.18-000002") + .settings(indexSettings(IndexVersion.current(), 1, 0)) + .build(); + + var metadata = Metadata.builder() + .put(lookup, false) + .put(ds1, false) + .put(ds2, false) + .put(DataStream.builder("data-stream", List.of(ds1.getIndex(), ds2.getIndex())).build()) + .build(); + + var routing = RoutingTable.builder() + .add(index(lookup.getIndex(), "node-1")) + .add(index(ds1.getIndex(), "node-1")) + .add(index(ds2.getIndex(), "node-1")) + .build(); + + // when performing necessary moves (such as preparation for the node shutdown) write shards should be moved first + assertThat( + next( + 3, + OrderedShardsIterator.createForNecessaryMoves( + createRoutingAllocation(nodes, metadata, routing), + new NodeAllocationOrdering() + ) + ), + contains( + isIndexShardAt(".ds-data-stream-2024.04.18-000002", "node-1"), + isIndexShardAt("lookup", "node-1"), + isIndexShardAt(".ds-data-stream-2024.04.18-000001", "node-1") + ) + ); + + // when performing rebalancing write shards should be moved last + assertThat( + next( + 3, + OrderedShardsIterator.createForBalancing(createRoutingAllocation(nodes, metadata, routing), new NodeAllocationOrdering()) + ), + contains( + isIndexShardAt(".ds-data-stream-2024.04.18-000001", "node-1"), + isIndexShardAt("lookup", "node-1"), + isIndexShardAt(".ds-data-stream-2024.04.18-000002", "node-1") + ) + ); + } + + private static RoutingAllocation createRoutingAllocation(DiscoveryNodes nodes, Metadata metadata, RoutingTable routing) { + return new RoutingAllocation( + new AllocationDeciders(List.of()), + ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).metadata(metadata).routingTable(routing).build(), + ClusterInfo.EMPTY, + SnapshotShardSizeInfo.EMPTY, + 0 + ); } private static IndexRoutingTable index(String indexName, String nodeId) { - var index = new Index(indexName, "_na_"); + return index(new Index(indexName, "_na_"), nodeId); + } + + private static IndexRoutingTable index(Index index, String nodeId) { return IndexRoutingTable.builder(index).addShard(newShardRouting(new ShardId(index, 0), nodeId, true, STARTED)).build(); } @@ -120,7 +199,9 @@ protected boolean matchesSafely(ShardRouting item) { } @Override - public void describeTo(Description description) {} + public void describeTo(Description description) { + description.appendText("[" + indexName + "][0], node[" + nodeId + "]"); + } }; } } From 6248bfdab6545305e96ad4152fa3c17a76bdba84 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 18 Apr 2024 14:40:12 +0200 Subject: [PATCH 094/130] Cleanup unused test code in ScriptedMetricTests (#107608) Just a random find, this isn't used now that the parsed aggs tests are gone. 
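The write-shard ordering from PATCH 093 above, which testShouldOrderShardByPriority exercises, boils down to a small priority function. The following is a minimal, hypothetical sketch — `Shard` and `movePriority` are invented stand-ins for the production `ShardRouting` and the cluster metadata lookup, kept only to illustrate the ordering rule:

[source,java]
----
import java.util.Comparator;
import java.util.List;

// Simplified stand-in for ShardRouting; the real comparator derives the same
// information from Metadata#getIndicesLookup.
record Shard(String index, boolean partOfDataStream, boolean writeIndex) {

    // 0 = data stream write index, 1 = regular index, 2 = other backing indices
    static int movePriority(Shard s) {
        if (s.partOfDataStream()) {
            return s.writeIndex() ? 0 : 2;
        }
        return 1;
    }

    public static void main(String[] args) {
        var shards = List.of(
            new Shard(".ds-logs-000001", true, false),
            new Shard("lookup", false, false),
            new Shard(".ds-logs-000002", true, true)
        );
        // Necessary moves (e.g. node shutdown): write index moves first.
        // Prints [.ds-logs-000002, lookup, .ds-logs-000001]
        System.out.println(shards.stream().sorted(Comparator.comparingInt(Shard::movePriority)).map(Shard::index).toList());
        // Rebalancing uses the reversed comparator: write index moves last.
        // Prints [.ds-logs-000001, lookup, .ds-logs-000002]
        System.out.println(shards.stream().sorted(Comparator.comparingInt(Shard::movePriority).reversed()).map(Shard::index).toList());
    }
}
----

Note how the two expected orderings match the `contains(...)` assertions in the test above.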
--- .../metrics/InternalScriptedMetricTests.java | 60 +------------------ 1 file changed, 1 insertion(+), 59 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index 25d69a956ff81..3577b6afd73df 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -26,7 +26,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -38,7 +37,6 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase[] valueTypes; @SuppressWarnings({ "rawtypes", "unchecked" }) private final Supplier[] leafValueSuppliers = new Supplier[] { () -> randomInt(), @@ -50,24 +48,13 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase new GeoPoint(randomDouble(), randomDouble()), () -> null }; @SuppressWarnings({ "rawtypes", "unchecked" }) - private final Supplier[] nestedValueSuppliers = new Supplier[] { () -> new HashMap(), () -> new ArrayList<>() }; + private final Supplier[] nestedValueSuppliers = new Supplier[] { HashMap::new, ArrayList::new }; @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public void setUp() throws Exception { super.setUp(); hasReduceScript = randomBoolean(); - // we want the same value types (also for nested lists, maps) for all random aggregations - int levels = randomIntBetween(1, 3); - valueTypes = new Supplier[levels]; - for (int i = 0; i < levels; i++) { - if (i < levels - 1) { - valueTypes[i] = randomFrom(nestedValueSuppliers); - } else { - // the last one needs to be a leaf value, not map or list - valueTypes[i] = randomFrom(leafValueSuppliers); - } - } } @Override @@ -178,51 +165,6 @@ public InternalScriptedMetric createTestInstanceForXContent() { ); } - private static void assertValues(Object expected, Object actual) { - if (expected instanceof Long) { - // longs that fit into the integer range are parsed back as integer - if (actual instanceof Integer) { - assertEquals(((Long) expected).intValue(), actual); - } else { - assertEquals(expected, actual); - } - } else if (expected instanceof Float) { - // based on the xContent type, floats are sometimes parsed back as doubles - if (actual instanceof Double) { - assertEquals(expected, ((Double) actual).floatValue()); - } else { - assertEquals(expected, actual); - } - } else if (expected instanceof GeoPoint point) { - assertTrue(actual instanceof Map); - @SuppressWarnings("unchecked") - Map pointMap = (Map) actual; - assertEquals(point.getLat(), pointMap.get("lat")); - assertEquals(point.getLon(), pointMap.get("lon")); - } else if (expected instanceof Map) { - @SuppressWarnings("unchecked") - Map expectedMap = (Map) expected; - @SuppressWarnings("unchecked") - Map actualMap = (Map) actual; - assertEquals(expectedMap.size(), actualMap.size()); - for (String key : expectedMap.keySet()) { - assertValues(expectedMap.get(key), actualMap.get(key)); - } - } else if (expected instanceof List) { - @SuppressWarnings("unchecked") - List expectedList = (List) expected; - @SuppressWarnings("unchecked") - List actualList = (List) actual; - assertEquals(expectedList.size(), actualList.size()); - Iterator actualIterator = 
actualList.iterator(); - for (Object element : expectedList) { - assertValues(element, actualIterator.next()); - } - } else { - assertEquals(expected, actual); - } - } - @Override protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) { String name = instance.getName(); From fde150011db13839b1529a4ba5e00678921d5f4d Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Thu, 18 Apr 2024 08:51:01 -0400 Subject: [PATCH 095/130] Users with monitor privileges can access async_search/status endpoint even when setting keep_alive (#107383) Fixes a bug in the async-search status endpoint where a user with monitor privileges is not able to access the status endpoint when setting keep_alive state of the async-search. --- docs/changelog/107383.yaml | 6 ++++++ .../xpack/search/AsyncSearchSecurityIT.java | 11 ++++++++++- .../xpack/search/TransportGetAsyncStatusAction.java | 3 ++- 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107383.yaml diff --git a/docs/changelog/107383.yaml b/docs/changelog/107383.yaml new file mode 100644 index 0000000000000..07886ac96180c --- /dev/null +++ b/docs/changelog/107383.yaml @@ -0,0 +1,6 @@ +pr: 107383 +summary: Users with monitor privileges can access async_search/status endpoint + even when setting keep_alive +area: Authorization +type: bug +issues: [] diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index f628566587611..05eddd742960c 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -177,7 +177,6 @@ public void testWithUsers() throws Exception { * the testWithUsers test is generally testing). 
* @throws IOException */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106871") public void testStatusWithUsersWhileSearchIsRunning() throws IOException { assumeTrue("[error_query] is only available in snapshot builds", Build.current().isSnapshot()); String user = randomFrom("user1", "user2"); @@ -250,6 +249,9 @@ private static void userBasedPermissionsAsserts(String user, String other, Strin // user-monitor can access the status assertOK(getAsyncStatus(id, "user-monitor")); + // user-monitor can access status and set keep_alive + assertOK(getAsyncStatusAndSetKeepAlive(id, "user-monitor")); + // user-monitor cannot access the result exc = expectThrows(ResponseException.class, () -> getAsyncSearch(id, "user-monitor")); assertThat(exc.getResponse().getStatusLine().getStatusCode(), equalTo(404)); @@ -485,6 +487,13 @@ static Response getAsyncStatus(String id, String user) throws IOException { return client().performRequest(request); } + static Response getAsyncStatusAndSetKeepAlive(String id, String user) throws IOException { + final Request request = new Request("GET", "/_async_search/status/" + id); + setRunAsHeader(request, user); + request.addParameter("keep_alive", "3m"); + return client().performRequest(request); + } + static Response getAsyncSearch(String id, String user) throws IOException { final Request request = new Request("GET", "/_async_search/" + id); setRunAsHeader(request, user); diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportGetAsyncStatusAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportGetAsyncStatusAction.java index cc5cd797f3fbc..cc27e82a69388 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportGetAsyncStatusAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportGetAsyncStatusAction.java @@ -35,6 +35,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; +import static org.elasticsearch.xpack.core.async.AsyncTaskIndexService.getTask; public class TransportGetAsyncStatusAction extends HandledTransportAction { private final TransportService transportService; @@ -76,7 +77,7 @@ protected void doExecute(Task task, GetAsyncStatusRequest request, ActionListene if (request.getKeepAlive() != null && request.getKeepAlive().getMillis() > 0) { long expirationTime = System.currentTimeMillis() + request.getKeepAlive().getMillis(); store.updateExpirationTime(searchId.getDocId(), expirationTime, ActionListener.wrap(p -> { - AsyncSearchTask asyncSearchTask = store.getTaskAndCheckAuthentication(taskManager, searchId, AsyncSearchTask.class); + AsyncSearchTask asyncSearchTask = getTask(taskManager, searchId, AsyncSearchTask.class); if (asyncSearchTask != null) { asyncSearchTask.setExpirationTime(expirationTime); } From 8adc2926a2b529d48bf67e6d7ec5b7a8f329136f Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 18 Apr 2024 08:08:30 -0500 Subject: [PATCH 096/130] Fixed the spelling of the word successful in docs (#107595) --- .../ingest/apis/geoip-stats-api.asciidoc | 2 +- ...grate-to-data-tiers-routing-guide.asciidoc | 20 +++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/reference/ingest/apis/geoip-stats-api.asciidoc b/docs/reference/ingest/apis/geoip-stats-api.asciidoc index 4917441c0020b..6135b7821f2a5 100644 --- a/docs/reference/ingest/apis/geoip-stats-api.asciidoc +++ 
b/docs/reference/ingest/apis/geoip-stats-api.asciidoc @@ -109,7 +109,7 @@ The number cache entries evicted from the cache. `hits_time_in_millis`:: (Long) -The amount of time in milliseconds spent fetching data from the cache on succesful cache hits only. +The amount of time in milliseconds spent fetching data from the cache on successful cache hits only. `misses_time_in_millis`:: (Long) diff --git a/docs/reference/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide.asciidoc b/docs/reference/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide.asciidoc index 0777ed8685400..810f9299c9ea5 100644 --- a/docs/reference/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide.asciidoc +++ b/docs/reference/tab-widgets/troubleshooting/data/migrate-to-data-tiers-routing-guide.asciidoc @@ -1,8 +1,8 @@ // tag::cloud[] -In order to get the shards assigned we need to call the +In order to get the shards assigned we need to call the <> API which will resolve the conflicting routing configurations towards using the standardized -<>. This will also future-proof the system by migrating +<>. This will also future-proof the system by migrating the index templates and ILM policies if needed. **Use {kib}** @@ -11,12 +11,12 @@ the index templates and ILM policies if needed. . Log in to the {ess-console}[{ecloud} console]. + -. On the **Elasticsearch Service** panel, click the name of your deployment. +. On the **Elasticsearch Service** panel, click the name of your deployment. + NOTE: If the name of your deployment is disabled your {kib} instances might be unhealthy, in which case please contact https://support.elastic.co[Elastic Support]. -If your deployment doesn't include {kib}, all you need to do is +If your deployment doesn't include {kib}, all you need to do is {cloud}/ec-access-kibana.html[enable it first]. . Open your deployment's side navigation menu (placed under the Elastic logo in the upper left corner) @@ -50,7 +50,7 @@ The response will look like this: GET /_ilm/status ---- + -When {ilm} has succesfully stopped the response will look like this: +When {ilm} has successfully stopped the response will look like this: + [source,console-result] ------------------------------------------------------------------------------ @@ -113,15 +113,15 @@ The response will look like this: // end::cloud[] // tag::self-managed[] -In order to get the shards assigned we need to make sure the deployment is -using the <> node roles and then call the +In order to get the shards assigned we need to make sure the deployment is +using the <> node roles and then call the <> API which will resolve the conflicting routing configurations towards using the standardized -<>. This will also future-proof the system by migrating +<>. This will also future-proof the system by migrating the index templates and ILM policies if needed. -. In case your deployment is not yet using <> <> +. In case your deployment is not yet using <> <> to the appropriate data tier. Configure the appropriate roles for each data node to assign it to one or more data tiers: `data_hot`, `data_content`, `data_warm`, `data_cold`, or `data_frozen`. 
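For example, a node that should hold only hot and content data would declare just those roles. This is an illustrative `elasticsearch.yml` fragment (not part of the patch itself), using the standard `node.roles` setting:

[source,yaml]
----
# Assign this node to the hot and content tiers; omit the roles for any tier
# this node should not serve.
node.roles: [ data_hot, data_content ]
----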
@@ -159,7 +159,7 @@ The response will look like this: GET /_ilm/status ---- + -When {ilm} has succesfully stopped the response will look like this: +When {ilm} has successfully stopped the response will look like this: + [source,console-result] ------------------------------------------------------------------------------ From cad5d90fa08c575c6fadfc6ffc174cdce3b4575e Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 18 Apr 2024 08:09:27 -0500 Subject: [PATCH 097/130] Converting the inner record-like classes in EnrichStatsAction to records (#107594) --- .../core/enrich/action/EnrichStatsAction.java | 152 ++---------------- .../xpack/enrich/EnrichMultiNodeIT.java | 8 +- .../xpack/enrich/EnrichProcessorIT.java | 18 +-- .../action/TransportEnrichStatsAction.java | 6 +- .../xpack/enrich/BasicEnrichTests.java | 12 +- .../xpack/enrich/EnrichCacheTests.java | 32 ++-- .../enrich/EnrichProcessorFactoryTests.java | 16 +- .../enrich/EnrichCoordinatorDocTests.java | 10 +- .../enrich/ExecutingPolicyDocTests.java | 24 ++- 9 files changed, 74 insertions(+), 204 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index c0d96347ccae4..be8513a8269a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -128,52 +128,18 @@ public int hashCode() { return Objects.hash(executingPolicies, coordinatorStats, cacheStats); } - public static class CoordinatorStats implements Writeable, ToXContentFragment { - - private final String nodeId; - private final int queueSize; - private final int remoteRequestsCurrent; - private final long remoteRequestsTotal; - private final long executedSearchesTotal; - - public CoordinatorStats( - String nodeId, - int queueSize, - int remoteRequestsCurrent, - long remoteRequestsTotal, - long executedSearchesTotal - ) { - this.nodeId = nodeId; - this.queueSize = queueSize; - this.remoteRequestsCurrent = remoteRequestsCurrent; - this.remoteRequestsTotal = remoteRequestsTotal; - this.executedSearchesTotal = executedSearchesTotal; - } + public record CoordinatorStats( + String nodeId, + int queueSize, + int remoteRequestsCurrent, + long remoteRequestsTotal, + long executedSearchesTotal + ) implements Writeable, ToXContentFragment { public CoordinatorStats(StreamInput in) throws IOException { this(in.readString(), in.readVInt(), in.readVInt(), in.readVLong(), in.readVLong()); } - public String getNodeId() { - return nodeId; - } - - public int getQueueSize() { - return queueSize; - } - - public int getRemoteRequestsCurrent() { - return remoteRequestsCurrent; - } - - public long getRemoteRequestsTotal() { - return remoteRequestsTotal; - } - - public long getExecutedSearchesTotal() { - return executedSearchesTotal; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(nodeId); @@ -192,47 +158,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("executed_searches_total", executedSearchesTotal); return builder; } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CoordinatorStats stats = (CoordinatorStats) o; - return Objects.equals(nodeId, stats.nodeId) - && queueSize == stats.queueSize 
- && remoteRequestsCurrent == stats.remoteRequestsCurrent - && remoteRequestsTotal == stats.remoteRequestsTotal - && executedSearchesTotal == stats.executedSearchesTotal; - } - - @Override - public int hashCode() { - return Objects.hash(nodeId, queueSize, remoteRequestsCurrent, remoteRequestsTotal, executedSearchesTotal); - } } - public static class ExecutingPolicy implements Writeable, ToXContentFragment { - - private final String name; - private final TaskInfo taskInfo; - - public ExecutingPolicy(String name, TaskInfo taskInfo) { - this.name = name; - this.taskInfo = taskInfo; - } + public record ExecutingPolicy(String name, TaskInfo taskInfo) implements Writeable, ToXContentFragment { ExecutingPolicy(StreamInput in) throws IOException { this(in.readString(), TaskInfo.from(in)); } - public String getName() { - return name; - } - - public TaskInfo getTaskInfo() { - return taskInfo; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); @@ -249,61 +182,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ExecutingPolicy that = (ExecutingPolicy) o; - return name.equals(that.name) && taskInfo.equals(that.taskInfo); - } - - @Override - public int hashCode() { - return Objects.hash(name, taskInfo); - } } - public static class CacheStats implements Writeable, ToXContentFragment { - - private final String nodeId; - private final long count; - private final long hits; - private final long misses; - private final long evictions; - - public CacheStats(String nodeId, long count, long hits, long misses, long evictions) { - this.nodeId = nodeId; - this.count = count; - this.hits = hits; - this.misses = misses; - this.evictions = evictions; - } + public record CacheStats(String nodeId, long count, long hits, long misses, long evictions) + implements + Writeable, + ToXContentFragment { public CacheStats(StreamInput in) throws IOException { this(in.readString(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); } - public String getNodeId() { - return nodeId; - } - - public long getCount() { - return count; - } - - public long getHits() { - return hits; - } - - public long getMisses() { - return misses; - } - - public long getEvictions() { - return evictions; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("node_id", nodeId); @@ -322,23 +211,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(misses); out.writeVLong(evictions); } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CacheStats that = (CacheStats) o; - return count == that.count - && hits == that.hits - && misses == that.misses - && evictions == that.evictions - && nodeId.equals(that.nodeId); - } - - @Override - public int hashCode() { - return Objects.hash(nodeId, count, hits, misses, evictions); - } } } diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java index 26e38252a4572..1960913a6d4f4 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java +++ 
b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -268,12 +268,12 @@ private static void enrich(Map> keys, String coordinatingNo .actionGet(); assertThat(statsResponse.getCoordinatorStats().size(), equalTo(internalCluster().size())); String nodeId = getNodeId(coordinatingNode); - CoordinatorStats stats = statsResponse.getCoordinatorStats().stream().filter(s -> s.getNodeId().equals(nodeId)).findAny().get(); - assertThat(stats.getNodeId(), equalTo(nodeId)); - assertThat(stats.getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); + CoordinatorStats stats = statsResponse.getCoordinatorStats().stream().filter(s -> s.nodeId().equals(nodeId)).findAny().get(); + assertThat(stats.nodeId(), equalTo(nodeId)); + assertThat(stats.remoteRequestsTotal(), greaterThanOrEqualTo(1L)); // 'numDocs' lookups are done, but not 'numDocs' searches, because searches may get cached: // and not all enrichments may happen via the same node. - assertThat(stats.getExecutedSearchesTotal(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo((long) numDocs))); + assertThat(stats.executedSearchesTotal(), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo((long) numDocs))); } private static List createSourceIndex(int numDocs) { diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java index e93f1e9c22028..f3d2403ce5d96 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorIT.java @@ -52,9 +52,9 @@ public void testEnrichCacheValuesCannotBeCorrupted() { var statsRequest = new EnrichStatsAction.Request(); var statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); assertThat(statsResponse.getCacheStats().size(), equalTo(1)); - assertThat(statsResponse.getCacheStats().get(0).getCount(), equalTo(0L)); - assertThat(statsResponse.getCacheStats().get(0).getMisses(), equalTo(0L)); - assertThat(statsResponse.getCacheStats().get(0).getHits(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(0L)); String policyName = "device-enrich-policy"; String sourceIndexName = "devices-idx"; @@ -128,9 +128,9 @@ public void testEnrichCacheValuesCannotBeCorrupted() { // Verify that there was a cache miss and a new entry was added to enrich cache. 
statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); assertThat(statsResponse.getCacheStats().size(), equalTo(1)); - assertThat(statsResponse.getCacheStats().get(0).getCount(), equalTo(1L)); - assertThat(statsResponse.getCacheStats().get(0).getMisses(), equalTo(1L)); - assertThat(statsResponse.getCacheStats().get(0).getHits(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(1L)); + assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(1L)); + assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(0L)); simulatePipelineRequest = new SimulatePipelineRequest(new BytesArray(""" { @@ -164,9 +164,9 @@ public void testEnrichCacheValuesCannotBeCorrupted() { // Verify that enrich lookup was served from cache: statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); assertThat(statsResponse.getCacheStats().size(), equalTo(1)); - assertThat(statsResponse.getCacheStats().get(0).getCount(), equalTo(1L)); - assertThat(statsResponse.getCacheStats().get(0).getMisses(), equalTo(1L)); - assertThat(statsResponse.getCacheStats().get(0).getHits(), equalTo(1L)); + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(1L)); + assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(1L)); + assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(1L)); } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java index 6b815f3fc7445..7bd2c43f54eaf 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportEnrichStatsAction.java @@ -84,7 +84,7 @@ protected void masterOperation( List coordinatorStats = response.getNodes() .stream() .map(EnrichCoordinatorStatsAction.NodeResponse::getCoordinatorStats) - .sorted(Comparator.comparing(CoordinatorStats::getNodeId)) + .sorted(Comparator.comparing(CoordinatorStats::nodeId)) .collect(Collectors.toList()); List policyExecutionTasks = taskManager.getTasks() .values() @@ -92,13 +92,13 @@ protected void masterOperation( .filter(t -> t.getAction().equals(EnrichPolicyExecutor.TASK_ACTION)) .map(t -> t.taskInfo(clusterService.localNode().getId(), true)) .map(t -> new ExecutingPolicy(t.description(), t)) - .sorted(Comparator.comparing(ExecutingPolicy::getName)) + .sorted(Comparator.comparing(ExecutingPolicy::name)) .collect(Collectors.toList()); List cacheStats = response.getNodes() .stream() .map(EnrichCoordinatorStatsAction.NodeResponse::getCacheStats) .filter(Objects::nonNull) - .sorted(Comparator.comparing(EnrichStatsAction.Response.CacheStats::getNodeId)) + .sorted(Comparator.comparing(EnrichStatsAction.Response.CacheStats::nodeId)) .collect(Collectors.toList()); delegate.onResponse(new EnrichStatsAction.Response(policyExecutionTasks, coordinatorStats, cacheStats)); }); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java index 4081d7108b0e7..e3822b366e122 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java @@ -150,9 +150,9 @@ public void 
testIngestDataWithMatchProcessor() { .actionGet(); assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1)); String localNodeId = getInstanceFromNode(ClusterService.class).localNode().getId(); - assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), equalTo(localNodeId)); - assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); - assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), equalTo((long) numDocs)); + assertThat(statsResponse.getCoordinatorStats().get(0).nodeId(), equalTo(localNodeId)); + assertThat(statsResponse.getCoordinatorStats().get(0).remoteRequestsTotal(), greaterThanOrEqualTo(1L)); + assertThat(statsResponse.getCoordinatorStats().get(0).executedSearchesTotal(), equalTo((long) numDocs)); } public void testIngestDataWithGeoMatchProcessor() { @@ -230,9 +230,9 @@ public void testIngestDataWithGeoMatchProcessor() { .actionGet(); assertThat(statsResponse.getCoordinatorStats().size(), equalTo(1)); String localNodeId = getInstanceFromNode(ClusterService.class).localNode().getId(); - assertThat(statsResponse.getCoordinatorStats().get(0).getNodeId(), equalTo(localNodeId)); - assertThat(statsResponse.getCoordinatorStats().get(0).getRemoteRequestsTotal(), greaterThanOrEqualTo(1L)); - assertThat(statsResponse.getCoordinatorStats().get(0).getExecutedSearchesTotal(), equalTo(1L)); + assertThat(statsResponse.getCoordinatorStats().get(0).nodeId(), equalTo(localNodeId)); + assertThat(statsResponse.getCoordinatorStats().get(0).remoteRequestsTotal(), greaterThanOrEqualTo(1L)); + assertThat(statsResponse.getCoordinatorStats().get(0).executedSearchesTotal(), equalTo(1L)); } public void testMultiplePolicies() { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java index fe3c3b3e467ef..2ecb40b306a63 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java @@ -84,27 +84,27 @@ public void testCaching() { enrichCache.put(searchRequest2, searchResponse); enrichCache.put(searchRequest3, searchResponse); var cacheStats = enrichCache.getStats("_id"); - assertThat(cacheStats.getCount(), equalTo(3L)); - assertThat(cacheStats.getHits(), equalTo(0L)); - assertThat(cacheStats.getMisses(), equalTo(0L)); - assertThat(cacheStats.getEvictions(), equalTo(0L)); + assertThat(cacheStats.count(), equalTo(3L)); + assertThat(cacheStats.hits(), equalTo(0L)); + assertThat(cacheStats.misses(), equalTo(0L)); + assertThat(cacheStats.evictions(), equalTo(0L)); assertThat(enrichCache.get(searchRequest1), notNullValue()); assertThat(enrichCache.get(searchRequest2), notNullValue()); assertThat(enrichCache.get(searchRequest3), notNullValue()); assertThat(enrichCache.get(searchRequest4), nullValue()); cacheStats = enrichCache.getStats("_id"); - assertThat(cacheStats.getCount(), equalTo(3L)); - assertThat(cacheStats.getHits(), equalTo(3L)); - assertThat(cacheStats.getMisses(), equalTo(1L)); - assertThat(cacheStats.getEvictions(), equalTo(0L)); + assertThat(cacheStats.count(), equalTo(3L)); + assertThat(cacheStats.hits(), equalTo(3L)); + assertThat(cacheStats.misses(), equalTo(1L)); + assertThat(cacheStats.evictions(), equalTo(0L)); enrichCache.put(searchRequest4, searchResponse); cacheStats = enrichCache.getStats("_id"); - assertThat(cacheStats.getCount(), 
equalTo(3L)); - assertThat(cacheStats.getHits(), equalTo(3L)); - assertThat(cacheStats.getMisses(), equalTo(1L)); - assertThat(cacheStats.getEvictions(), equalTo(1L)); + assertThat(cacheStats.count(), equalTo(3L)); + assertThat(cacheStats.hits(), equalTo(3L)); + assertThat(cacheStats.misses(), equalTo(1L)); + assertThat(cacheStats.evictions(), equalTo(1L)); // Simulate enrich policy execution, which should make current cache entries unused. metadata = Metadata.builder() @@ -142,10 +142,10 @@ public void testCaching() { assertThat(enrichCache.get(searchRequest3), notNullValue()); assertThat(enrichCache.get(searchRequest4), nullValue()); cacheStats = enrichCache.getStats("_id"); - assertThat(cacheStats.getCount(), equalTo(3L)); - assertThat(cacheStats.getHits(), equalTo(6L)); - assertThat(cacheStats.getMisses(), equalTo(6L)); - assertThat(cacheStats.getEvictions(), equalTo(4L)); + assertThat(cacheStats.count(), equalTo(3L)); + assertThat(cacheStats.hits(), equalTo(6L)); + assertThat(cacheStats.misses(), equalTo(6L)); + assertThat(cacheStats.evictions(), equalTo(4L)); } public void testPutIfAbsent() throws InterruptedException { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java index 9d63c56ecf721..f10c8e4e41c93 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java @@ -303,10 +303,10 @@ protected void assertThat(failure[0], nullValue()); assertThat(result[0], notNullValue()); assertThat(requestCounter[0], equalTo(1)); - assertThat(enrichCache.getStats("_id").getCount(), equalTo(1L)); - assertThat(enrichCache.getStats("_id").getMisses(), equalTo(1L)); - assertThat(enrichCache.getStats("_id").getHits(), equalTo(0L)); - assertThat(enrichCache.getStats("_id").getEvictions(), equalTo(0L)); + assertThat(enrichCache.getStats("_id").count(), equalTo(1L)); + assertThat(enrichCache.getStats("_id").misses(), equalTo(1L)); + assertThat(enrichCache.getStats("_id").hits(), equalTo(0L)); + assertThat(enrichCache.getStats("_id").evictions(), equalTo(0L)); // No search is performed, result is read from the cache: result[0] = null; @@ -318,10 +318,10 @@ protected void assertThat(failure[0], nullValue()); assertThat(result[0], notNullValue()); assertThat(requestCounter[0], equalTo(1)); - assertThat(enrichCache.getStats("_id").getCount(), equalTo(1L)); - assertThat(enrichCache.getStats("_id").getMisses(), equalTo(1L)); - assertThat(enrichCache.getStats("_id").getHits(), equalTo(1L)); - assertThat(enrichCache.getStats("_id").getEvictions(), equalTo(0L)); + assertThat(enrichCache.getStats("_id").count(), equalTo(1L)); + assertThat(enrichCache.getStats("_id").misses(), equalTo(1L)); + assertThat(enrichCache.getStats("_id").hits(), equalTo(1L)); + assertThat(enrichCache.getStats("_id").evictions(), equalTo(0L)); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java index d5560f910177f..1ff8214294c1d 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java +++ 
b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichCoordinatorDocTests.java @@ -109,11 +109,11 @@ public void testToXContent() throws IOException { DATE_TIME_FORMATTER.formatMillis(timestamp), intervalMillis, DATE_TIME_FORMATTER.formatMillis(nodeTimestamp), - stats.getNodeId(), - stats.getQueueSize(), - stats.getRemoteRequestsCurrent(), - stats.getRemoteRequestsTotal(), - stats.getExecutedSearchesTotal() + stats.nodeId(), + stats.queueSize(), + stats.remoteRequestsCurrent(), + stats.remoteRequestsTotal(), + stats.executedSearchesTotal() ) ) )
diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java index f56024c22e33b..3352e6e2bb8a4 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/ExecutingPolicyDocTests.java @@ -72,23 +72,21 @@ public void testToXContent() throws IOException { final ExecutingPolicyDoc document = new ExecutingPolicyDoc("_cluster", timestamp, intervalMillis, node, executingPolicy); final BytesReference xContent = XContentHelper.toXContent(document, XContentType.JSON, false); - Optional<Map.Entry<String, String>> header = executingPolicy.getTaskInfo().headers().entrySet().stream().findAny(); + Optional<Map.Entry<String, String>> header = executingPolicy.taskInfo().headers().entrySet().stream().findAny(); Object[] args = new Object[] { DATE_TIME_FORMATTER.formatMillis(timestamp), intervalMillis, DATE_TIME_FORMATTER.formatMillis(nodeTimestamp), - executingPolicy.getName(), - executingPolicy.getTaskInfo().taskId().getNodeId(), - executingPolicy.getTaskInfo().taskId().getId(), - executingPolicy.getTaskInfo().type(), - executingPolicy.getTaskInfo().action(), - executingPolicy.getTaskInfo().description(), - executingPolicy.getTaskInfo().startTime(), - executingPolicy.getTaskInfo().runningTimeNanos(), - executingPolicy.getTaskInfo().cancellable(), - executingPolicy.getTaskInfo().cancellable() - ? Strings.format("\"cancelled\": %s,", executingPolicy.getTaskInfo().cancelled()) - : "", + executingPolicy.name(), + executingPolicy.taskInfo().taskId().getNodeId(), + executingPolicy.taskInfo().taskId().getId(), + executingPolicy.taskInfo().type(), + executingPolicy.taskInfo().action(), + executingPolicy.taskInfo().description(), + executingPolicy.taskInfo().startTime(), + executingPolicy.taskInfo().runningTimeNanos(), + executingPolicy.taskInfo().cancellable(), + executingPolicy.taskInfo().cancellable() ? Strings.format("\"cancelled\": %s,", executingPolicy.taskInfo().cancelled()) : "", header.map(entry -> { return Strings.format(""" {"%s":"%s"}""", entry.getKey(), entry.getValue()); }).orElse("{}") }; assertThat(xContent.utf8ToString(), equalTo(XContentHelper.stripWhitespace(Strings.format("""
From b18af0706e7e374342bf1392dc01b1184543282a Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 18 Apr 2024 14:53:12 +0100 Subject: [PATCH 098/130] Ensure fields created through dynamic template are detectable (#107491) This change ensures that the field type parser can reliably detect whether a field comes from a dynamic template rule. Since dynamic templates and index templates are validated at creation time, this allows such dynamic fields to be detected early, when the mapping or template is created.
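For a plugin-defined mapper, the practical effect is that a field type can opt out of dynamic templates declaratively. A minimal sketch of the wiring, mirroring the NonDynamicFieldMapper test fixture added below (NAME and Builder are the usual FieldMapper plumbing):

    // Context validators run while a mapping or index template is parsed, so a
    // dynamic template that references this type is rejected at creation time
    // rather than when the first matching document is indexed.
    private static final TypeParser PARSER = new TypeParser(
        (n, c) -> new Builder(n),
        List.of(notFromDynamicTemplates(NAME), notInMultiFields(NAME))
    );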
--- .../index/mapper/FieldMapper.java | 15 ++ .../index/mapper/RootObjectMapper.java | 2 +- .../index/mapper/DynamicFieldMapperTests.java | 213 ++++++++++++++++++ .../index/mapper/DynamicTemplatesTests.java | 9 +- 4 files changed, 233 insertions(+), 6 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldMapperTests.java diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index fe9bdd73cfa10..21a0c4d393a23 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -1466,6 +1466,14 @@ public static BiConsumer notInMultiFields(String t }; } + public static BiConsumer notFromDynamicTemplates(String type) { + return (n, c) -> { + if (c.isFromDynamicTemplate()) { + throw new MapperParsingException("Field [" + n + "] of type [" + type + "] can't be used in dynamic templates"); + } + }; + } + /** * TypeParser implementation that automatically handles parsing */ @@ -1498,6 +1506,13 @@ public TypeParser( this(builderFunction, contextValidator, IndexVersions.MINIMUM_COMPATIBLE); } + public TypeParser( + BiFunction builderFunction, + List> contextValidator + ) { + this(builderFunction, (n, c) -> contextValidator.forEach(v -> v.accept(n, c)), IndexVersions.MINIMUM_COMPATIBLE); + } + private TypeParser( BiFunction builderFunction, BiConsumer contextValidator, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 8db3a970e31c8..76b626558ac71 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -632,7 +632,7 @@ private static boolean processField( String templateName = entry.getKey(); Map templateParams = (Map) entry.getValue(); DynamicTemplate template = DynamicTemplate.parse(templateName, templateParams); - validateDynamicTemplate(parserContext, template); + validateDynamicTemplate(parserContext.createDynamicTemplateContext(null), template); templates.add(template); } builder.dynamicTemplates(templates); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldMapperTests.java new file mode 100644 index 0000000000000..6f47a9be84429 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldMapperTests.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class DynamicFieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return List.of(NonDynamicFieldPlugin.class); + } + + public void testCreateExplicitMappingSucceeds() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "non_dynamic" + } + } + } + } + """; + var resp = client().admin().indices().prepareCreate("test").setMapping(mapping).get(); + assertTrue(resp.isAcknowledged()); + var mappingsResp = client().admin().indices().prepareGetMappings("test").get(); + var mappingMetadata = mappingsResp.getMappings().get("test"); + var fieldType = XContentMapValues.extractValue("properties.field.type", mappingMetadata.getSourceAsMap()); + assertThat(fieldType, equalTo(NonDynamicFieldMapper.NAME)); + } + + public void testCreateDynamicMappingFails() throws Exception { + String mapping = """ + { + "_doc": { + "dynamic_templates": [ + { + "strings_as_type": { + "match_mapping_type": "string", + "mapping": { + "type": "non_dynamic" + } + } + } + ] + } + } + """; + CreateIndexRequestBuilder req = client().admin().indices().prepareCreate("test").setMapping(mapping); + Exception exc = expectThrows(Exception.class, () -> req.get()); + assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exc.getCause().getCause(), instanceOf(MapperParsingException.class)); + assertThat(exc.getCause().getCause().getMessage(), containsString("[non_dynamic] can't be used in dynamic templates")); + } + + public void testUpdateDynamicMappingFails() throws Exception { + var resp = client().admin().indices().prepareCreate("test").get(); + assertTrue(resp.isAcknowledged()); + String mapping = """ + { + "_doc": { + "dynamic_templates": [ + { + "strings_as_type": { + "match_mapping_type": "string", + "mapping": { + "type": "non_dynamic" + } + } + } + ] + } + } + """; + var req = client().admin().indices().preparePutMapping("test").setSource(mapping, XContentType.JSON); + Exception exc = expectThrows(Exception.class, () -> req.get()); + assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exc.getCause().getCause(), instanceOf(MapperParsingException.class)); + assertThat(exc.getCause().getCause().getMessage(), containsString("[non_dynamic] can't be used in dynamic templates")); + } + + public void testCreateDynamicMappingInIndexTemplateFails() throws Exception { + String mapping = """ + { + "_doc": { + "dynamic_templates": [ + { + "strings_as_type": { + "match_mapping_type": "string", + "mapping": { + "type": "non_dynamic" + } + } + } + ] + } + } + """; + PutIndexTemplateRequestBuilder req = client().admin() + .indices() + .preparePutTemplate("template1") + .setMapping(mapping, XContentType.JSON) + 
.setPatterns(List.of("test*")); + Exception exc = expectThrows(Exception.class, () -> req.get()); + assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(exc.getCause().getCause(), instanceOf(MapperParsingException.class)); + assertThat(exc.getCause().getCause().getMessage(), containsString("[non_dynamic] can't be used in dynamic templates")); + } + + public void testCreateExplicitMappingInIndexTemplateSucceeds() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "non_dynamic" + } + } + } + } + """; + PutIndexTemplateRequestBuilder req = client().admin() + .indices() + .preparePutTemplate("template1") + .setMapping(mapping, XContentType.JSON) + .setPatterns(List.of("test*")); + assertTrue(req.get().isAcknowledged()); + + var resp = client().prepareIndex("test1").setSource("field", "hello world").get(); + assertThat(resp.status(), equalTo(RestStatus.CREATED)); + + var mappingsResp = client().admin().indices().prepareGetMappings("test1").get(); + var mappingMetadata = mappingsResp.getMappings().get("test1"); + var fieldType = XContentMapValues.extractValue("properties.field.type", mappingMetadata.getSourceAsMap()); + assertThat(fieldType, equalTo(NonDynamicFieldMapper.NAME)); + } + + public static class NonDynamicFieldPlugin extends Plugin implements MapperPlugin { + public NonDynamicFieldPlugin() {} + + @Override + public Map getMappers() { + return Map.of(NonDynamicFieldMapper.NAME, NonDynamicFieldMapper.PARSER); + } + } + + private static class NonDynamicFieldMapper extends FieldMapper { + private static final String NAME = "non_dynamic"; + + private static final TypeParser PARSER = new TypeParser( + (n, c) -> new Builder(n), + List.of(notFromDynamicTemplates(NAME), notInMultiFields(NAME)) + ); + + private static class Builder extends FieldMapper.Builder { + private final Parameter> meta = Parameter.metaParam(); + + Builder(String name) { + super(name); + } + + @Override + protected Parameter[] getParameters() { + return new Parameter[] { meta }; + } + + @Override + public NonDynamicFieldMapper build(MapperBuilderContext context) { + return new NonDynamicFieldMapper(name(), new TextFieldMapper.TextFieldType(name(), false, true, meta.getValue())); + } + } + + private NonDynamicFieldMapper(String simpleName, MappedFieldType mappedFieldType) { + super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty()); + } + + @Override + protected String contentType() { + return NAME; + } + + @Override + protected void parseCreateField(DocumentParserContext context) throws IOException {} + + @Override + public FieldMapper.Builder getMergeBuilder() { + return new Builder(simpleName()).init(this); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 0f285992b749a..cd8a59a1a6ad5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -244,11 +244,10 @@ public void testDynamicMapperWithBadMapping() throws IOException { } b.endArray(); })); - assertWarnings(""" - dynamic template [test] has invalid content [{"match_mapping_type":"string","mapping":{"badparam":false}}], \ - attempted to validate it with the following match_mapping_type: [string], last error: \ - [unknown parameter [badparam] on mapper [__dynamic__test] of type [null]]"""); - + assertWarnings( + 
"Parameter [badparam] is used in a dynamic template mapping and has no effect on type [null]. " + + "Usage will result in an error in future major versions and should be removed." + ); mapper.parse(source(b -> b.field("field", "foo"))); assertWarnings( "Parameter [badparam] is used in a dynamic template mapping and has no effect on type [null]. " From fdbb21bba49a04667859ad2b26c7dee84a6cab0c Mon Sep 17 00:00:00 2001 From: Howard Date: Thu, 18 Apr 2024 21:57:28 +0800 Subject: [PATCH 099/130] Support effective watermark thresholds in node stats API (#107244) Adds to the `fs` component of the node stats API some additional values indicating the disk watermarks that are currently in effect. Relates #106676 --- docs/changelog/107244.yaml | 5 ++ .../nodes.stats/90_fs_watermark_stats.yml | 27 ++++++ .../allocation/DiskThresholdMonitorIT.java | 85 ++++++++++++++++++ server/src/main/java/module-info.java | 1 + .../org/elasticsearch/TransportVersions.java | 1 + .../allocation/AllocationStatsFeatures.java | 23 +++++ .../TransportGetAllocationStatsAction.java | 41 ++++++++- .../admin/cluster/node/stats/NodeStats.java | 8 +- .../node/stats/TransportNodesStatsAction.java | 17 +++- .../allocation/DiskThresholdSettings.java | 60 ++++++++++++- .../elasticsearch/common/unit/RatioValue.java | 16 +++- .../common/unit/RelativeByteSizeValue.java | 27 +++++- .../org/elasticsearch/monitor/fs/FsInfo.java | 87 +++++++++++++++++++ ...lasticsearch.features.FeatureSpecification | 1 + .../unit/RelativeByteSizeValueTests.java | 29 +++++++ 15 files changed, 415 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/107244.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java diff --git a/docs/changelog/107244.yaml b/docs/changelog/107244.yaml new file mode 100644 index 0000000000000..f805796674f93 --- /dev/null +++ b/docs/changelog/107244.yaml @@ -0,0 +1,5 @@ +pr: 107244 +summary: Support effective watermark thresholds in node stats API +area: Allocation +type: enhancement +issues: [106676] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml new file mode 100644 index 0000000000000..3ec854e93d82c --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.stats/90_fs_watermark_stats.yml @@ -0,0 +1,27 @@ +--- +"Allocation stats": + - requires: + cluster_features: ["stats.include_disk_thresholds"] + reason: "fs watermark stats was added in 8.15.0" + test_runner_features: [arbitrary_key] + + - do: + nodes.info: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + nodes.stats: + metric: [ fs ] + + - exists: nodes.$node_id.fs + - exists: nodes.$node_id.fs.data + - exists: nodes.$node_id.fs.data.0.path + - exists: nodes.$node_id.fs.data.0.mount + - exists: nodes.$node_id.fs.data.0.type + - exists: nodes.$node_id.fs.data.0.total_in_bytes + - exists: nodes.$node_id.fs.data.0.free_in_bytes + - exists: nodes.$node_id.fs.data.0.available_in_bytes + - exists: nodes.$node_id.fs.data.0.low_watermark_free_space_in_bytes + - exists: nodes.$node_id.fs.data.0.high_watermark_free_space_in_bytes + - exists: nodes.$node_id.fs.data.0.flood_stage_free_space_in_bytes diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorIT.java index 31e45e64d8afe..eb62ad5e6eec1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitorIT.java @@ -8,20 +8,32 @@ package org.elasticsearch.cluster.routing.allocation; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.cluster.DiskUsageIntegTestCase; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.NodeRoles; import java.util.Locale; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_WATERMARK_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_MAX_HEADROOM_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_MAX_HEADROOM_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_MAX_HEADROOM_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; import static org.elasticsearch.index.store.Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; @@ -135,6 +147,79 @@ public void testRemoveExistingIndexBlocksWhenDiskThresholdMonitorIsDisabled() th assertThat(getIndexBlock(indexName, IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE), equalTo("true")); } + public void testNodeStatsIncludingDiskThreshold() { + Settings.Builder masterNodeSettings = Settings.builder(); + masterNodeSettings.put(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "80%") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_WATERMARK_SETTING.getKey(), "90%"); + + internalCluster().startMasterOnlyNode(masterNodeSettings.build()); + final String dataNodeName = 
internalCluster().startDataOnlyNode(); + final String frozenNodeName = internalCluster().startNode(NodeRoles.onlyRole(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE)); + + getTestFileStore(dataNodeName).setTotalSpace(10000L); + getTestFileStore(frozenNodeName).setTotalSpace(20000L); + + // no cluster settings, but master node's yml watermark threshold is different, data node stats based on master node's thresholds + NodesStatsResponse nodeStats = clusterAdmin().prepareNodesStats().setAllocationStats(true).setFs(true).get(); + NodeStats dataNode = nodeStats.getNodes().stream().filter(n -> dataNodeName.equals(n.getNode().getName())).findFirst().get(); + FsInfo.Path path = dataNode.getFs().iterator().next(); + assertEquals(4000, path.getLowWatermarkFreeSpace().getBytes()); + assertEquals(3000, path.getHighWatermarkFreeSpace().getBytes()); + assertEquals(2000, path.getFloodStageWatermarkFreeSpace().getBytes()); + assertNull(path.getFrozenFloodStageWatermarkFreeSpace()); + + NodeStats frozenNode = nodeStats.getNodes().stream().filter(n -> frozenNodeName.equals(n.getNode().getName())).findFirst().get(); + path = frozenNode.getFs().iterator().next(); + assertEquals(8000, path.getLowWatermarkFreeSpace().getBytes()); + assertEquals(6000, path.getHighWatermarkFreeSpace().getBytes()); + assertEquals(4000, path.getFloodStageWatermarkFreeSpace().getBytes()); + assertEquals(2000, path.getFrozenFloodStageWatermarkFreeSpace().getBytes()); + + // update dynamic cluster settings, use absolute value, both data and master node have the same thresholds + updateClusterSettings( + Settings.builder() + .put(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "3000b") + .put(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "2000b") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1000b") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_WATERMARK_SETTING.getKey(), "500b") + ); + + nodeStats = clusterAdmin().prepareNodesStats().setAllocationStats(true).setFs(true).get(); + dataNode = nodeStats.getNodes().stream().filter(n -> dataNodeName.equals(n.getNode().getName())).findFirst().get(); + path = dataNode.getFs().iterator().next(); + assertEquals(3000, path.getLowWatermarkFreeSpace().getBytes()); + assertEquals(2000, path.getHighWatermarkFreeSpace().getBytes()); + assertEquals(1000, path.getFloodStageWatermarkFreeSpace().getBytes()); + assertNull(path.getFrozenFloodStageWatermarkFreeSpace()); + + frozenNode = nodeStats.getNodes().stream().filter(n -> frozenNodeName.equals(n.getNode().getName())).findFirst().get(); + path = frozenNode.getFs().iterator().next(); + assertEquals(3000, path.getLowWatermarkFreeSpace().getBytes()); + assertEquals(2000, path.getHighWatermarkFreeSpace().getBytes()); + assertEquals(1000, path.getFloodStageWatermarkFreeSpace().getBytes()); + assertEquals(500, path.getFrozenFloodStageWatermarkFreeSpace().getBytes()); + + // effective threshold percent calculated based on headroom + updateClusterSettings( + Settings.builder() + .put(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "60%") + .put(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "70%") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "80%") + .put(CLUSTER_ROUTING_ALLOCATION_LOW_DISK_MAX_HEADROOM_SETTING.getKey(), "500b") + .put(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_MAX_HEADROOM_SETTING.getKey(), "300b") + .put(CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_MAX_HEADROOM_SETTING.getKey(), "100b") + ); + + nodeStats = 
clusterAdmin().prepareNodesStats().setAllocationStats(true).setFs(true).get(); + dataNode = nodeStats.getNodes().stream().filter(n -> dataNodeName.equals(n.getNode().getName())).findFirst().get(); + path = dataNode.getFs().iterator().next(); + assertEquals(500, path.getLowWatermarkFreeSpace().getBytes()); + assertEquals(300, path.getHighWatermarkFreeSpace().getBytes()); + assertEquals(100, path.getFloodStageWatermarkFreeSpace().getBytes()); + } + // Retrieves the value of the given block on an index. private static String getIndexBlock(String indexName, String blockName) { return indicesAdmin().prepareGetSettings(indexName).setNames(blockName).get().getSetting(indexName, blockName); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 6e01cb01626ae..8295edfee12f4 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -423,6 +423,7 @@ org.elasticsearch.cluster.metadata.MetadataFeatures, org.elasticsearch.rest.RestFeatures, org.elasticsearch.indices.IndicesFeatures, + org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.search.retriever.RetrieversFeatures; uses org.elasticsearch.plugins.internal.SettingsExtension; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5c5a536034508..a66d96587c5e5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,6 +175,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_EMBEDDINGS = def(8_634_00_0); public static final TransportVersion ILM_SHRINK_ENABLE_WRITE = def(8_635_00_0); public static final TransportVersion GEOIP_CACHE_STATS = def(8_636_00_0); + public static final TransportVersion WATERMARK_THRESHOLDS_STATS = def(8_637_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java new file mode 100644 index 0000000000000..c74a23c6b17ea --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/AllocationStatsFeatures.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.allocation; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +public class AllocationStatsFeatures implements FeatureSpecification { + public static final NodeFeature INCLUDE_DISK_THRESHOLD_SETTINGS = new NodeFeature("stats.include_disk_thresholds"); + + @Override + public Set getFeatures() { + return Set.of(INCLUDE_DISK_THRESHOLD_SETTINGS); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index a17a627342c4f..75434ff554b9c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -21,11 +21,14 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.allocation.AllocationStatsService; +import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -41,6 +44,8 @@ public class TransportGetAllocationStatsAction extends TransportMasterNodeReadAc public static final ActionType TYPE = new ActionType<>("cluster:monitor/allocation/stats"); private final AllocationStatsService allocationStatsService; + private final DiskThresholdSettings diskThresholdSettings; + private final FeatureService featureService; @Inject public TransportGetAllocationStatsAction( @@ -49,7 +54,8 @@ public TransportGetAllocationStatsAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - AllocationStatsService allocationStatsService + AllocationStatsService allocationStatsService, + FeatureService featureService ) { super( TYPE.name(), @@ -63,13 +69,15 @@ public TransportGetAllocationStatsAction( threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.allocationStatsService = allocationStatsService; + this.diskThresholdSettings = new DiskThresholdSettings(clusterService.getSettings(), clusterService.getClusterSettings()); + this.featureService = featureService; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { if (clusterService.state().getMinTransportVersion().before(TransportVersions.ALLOCATION_STATS)) { // The action is not available before ALLOCATION_STATS - listener.onResponse(new Response(Map.of())); + listener.onResponse(new Response(Map.of(), null)); return; } super.doExecute(task, request, listener); @@ -77,7 +85,14 @@ protected void doExecute(Task task, Request request, ActionListener li @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { - listener.onResponse(new 
Response(allocationStatsService.stats())); + listener.onResponse( + new Response( + allocationStatsService.stats(), + featureService.clusterHasFeature(clusterService.state(), AllocationStatsFeatures.INCLUDE_DISK_THRESHOLD_SETTINGS) + ? diskThresholdSettings + : null + ) + ); } @Override @@ -110,23 +125,41 @@ public ActionRequestValidationException validate() { public static class Response extends ActionResponse { private final Map nodeAllocationStats; + @Nullable // for bwc + private final DiskThresholdSettings diskThresholdSettings; - public Response(Map nodeAllocationStats) { + public Response(Map nodeAllocationStats, DiskThresholdSettings diskThresholdSettings) { this.nodeAllocationStats = nodeAllocationStats; + this.diskThresholdSettings = diskThresholdSettings; } public Response(StreamInput in) throws IOException { super(in); this.nodeAllocationStats = in.readImmutableMap(StreamInput::readString, NodeAllocationStats::new); + if (in.getTransportVersion().onOrAfter(TransportVersions.WATERMARK_THRESHOLDS_STATS)) { + this.diskThresholdSettings = in.readOptionalWriteable(DiskThresholdSettings::readFrom); + } else { + this.diskThresholdSettings = null; + } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(nodeAllocationStats, StreamOutput::writeString, StreamOutput::writeWriteable); + if (out.getTransportVersion().onOrAfter(TransportVersions.WATERMARK_THRESHOLDS_STATS)) { + out.writeOptionalWriteable(diskThresholdSettings); + } else { + assert diskThresholdSettings == null; + } } public Map getNodeAllocationStats() { return nodeAllocationStats; } + + @Nullable // for bwc + public DiskThresholdSettings getDiskThresholdSettings() { + return diskThresholdSettings; + } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index 8fcb5a320bd41..a438983e855e9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; @@ -171,7 +172,10 @@ public NodeStats( this.nodeAllocationStats = nodeAllocationStats; } - public NodeStats withNodeAllocationStats(@Nullable NodeAllocationStats nodeAllocationStats) { + public NodeStats withNodeAllocationStats( + @Nullable NodeAllocationStats nodeAllocationStats, + @Nullable DiskThresholdSettings masterThresholdSettings + ) { return new NodeStats( getNode(), timestamp, @@ -180,7 +184,7 @@ public NodeStats withNodeAllocationStats(@Nullable NodeAllocationStats nodeAlloc process, jvm, threadPool, - fs, + FsInfo.setEffectiveWatermarks(fs, masterThresholdSettings, getNode().isDedicatedFrozenNode()), transport, http, breaker, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 6ff2303997482..2c857f6b0898b 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -85,12 +86,16 @@ protected void newResponseAsync( ActionListener listener ) { Set metrics = request.getNodesStatsRequestParameters().requestedMetrics(); - if (NodesStatsRequestParameters.Metric.ALLOCATIONS.containedIn(metrics)) { + if (NodesStatsRequestParameters.Metric.ALLOCATIONS.containedIn(metrics) + || NodesStatsRequestParameters.Metric.FS.containedIn(metrics)) { client.execute( TransportGetAllocationStatsAction.TYPE, new TransportGetAllocationStatsAction.Request(new TaskId(clusterService.localNode().getId(), task.getId())), listener.delegateFailure((l, r) -> { - ActionListener.respondAndRelease(l, newResponse(request, merge(responses, r.getNodeAllocationStats()), failures)); + ActionListener.respondAndRelease( + l, + newResponse(request, merge(responses, r.getNodeAllocationStats(), r.getDiskThresholdSettings()), failures) + ); }) ); } else { @@ -98,9 +103,13 @@ protected void newResponseAsync( } } - private static List merge(List responses, Map allocationStats) { + private static List merge( + List responses, + Map allocationStats, + DiskThresholdSettings masterThresholdSettings + ) { return responses.stream() - .map(response -> response.withNodeAllocationStats(allocationStats.get(response.getNode().getId()))) + .map(response -> response.withNodeAllocationStats(allocationStats.get(response.getNode().getId()), masterThresholdSettings)) .toList(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java index ea0ee630ef073..fbcd4e6a8a717 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java @@ -8,6 +8,9 @@ package org.elasticsearch.cluster.routing.allocation; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -17,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; +import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -24,7 +28,7 @@ /** * A container to keep settings for disk thresholds up to date with cluster setting changes. 
*/ -public class DiskThresholdSettings { +public class DiskThresholdSettings implements Writeable { public static final Setting CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING = Setting.boolSetting( "cluster.routing.allocation.disk.threshold_enabled", true, @@ -199,6 +203,60 @@ public DiskThresholdSettings(Settings settings, ClusterSettings clusterSettings) clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING, this::setEnabled); } + public DiskThresholdSettings( + RelativeByteSizeValue lowStageWatermark, + ByteSizeValue lowStageMaxHeadroom, + RelativeByteSizeValue highStageWatermark, + ByteSizeValue highStageMaxHeadroom, + RelativeByteSizeValue floodStageWatermark, + ByteSizeValue floodStageMaxHeadroom, + RelativeByteSizeValue frozenFloodStageWatermark, + ByteSizeValue frozenFloodStageMaxHeadroom + ) { + this.lowStageWatermark = lowStageWatermark; + this.lowStageMaxHeadroom = lowStageMaxHeadroom; + this.highStageWatermark = highStageWatermark; + this.highStageMaxHeadroom = highStageMaxHeadroom; + this.floodStageWatermark = floodStageWatermark; + this.floodStageMaxHeadroom = floodStageMaxHeadroom; + this.frozenFloodStageWatermark = frozenFloodStageWatermark; + this.frozenFloodStageMaxHeadroom = frozenFloodStageMaxHeadroom; + } + + public static DiskThresholdSettings readFrom(StreamInput in) throws IOException { + final var lowStageWatermark = RelativeByteSizeValue.readFrom(in); + final var lowStageMaxHeadroom = ByteSizeValue.readFrom(in); + final var highStageWatermark = RelativeByteSizeValue.readFrom(in); + final var highStageMaxHeadroom = ByteSizeValue.readFrom(in); + final var floodStageWatermark = RelativeByteSizeValue.readFrom(in); + final var floodStageMaxHeadroom = ByteSizeValue.readFrom(in); + final var frozenFloodStageWatermark = RelativeByteSizeValue.readFrom(in); + final var frozenFloodStageMaxHeadroom = ByteSizeValue.readFrom(in); + + return new DiskThresholdSettings( + lowStageWatermark, + lowStageMaxHeadroom, + highStageWatermark, + highStageMaxHeadroom, + floodStageWatermark, + floodStageMaxHeadroom, + frozenFloodStageWatermark, + frozenFloodStageMaxHeadroom + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + lowStageWatermark.writeTo(out); + lowStageMaxHeadroom.writeTo(out); + highStageWatermark.writeTo(out); + highStageMaxHeadroom.writeTo(out); + floodStageWatermark.writeTo(out); + floodStageMaxHeadroom.writeTo(out); + frozenFloodStageWatermark.writeTo(out); + frozenFloodStageMaxHeadroom.writeTo(out); + } + /** * Validates that low, high and flood stage watermarks are all either percentages or byte values, * and that their values adhere to the comparison: low < high < flood. Else, throws an exception. 
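Making DiskThresholdSettings a Writeable is what lets the master ship its effective thresholds to other nodes inside the allocation-stats response. A minimal round-trip sketch using the same stream helpers as the tests in this patch (the diskThresholdSettings instance is a placeholder):

    // writeTo and readFrom must walk the eight watermark/headroom fields in
    // the same order, otherwise the stream desynchronizes on the reader side.
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        diskThresholdSettings.writeTo(out);
        try (var in = out.bytes().streamInput()) {
            DiskThresholdSettings copy = DiskThresholdSettings.readFrom(in);
            // copy now carries the same effective watermarks as the original
        }
    }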
diff --git a/server/src/main/java/org/elasticsearch/common/unit/RatioValue.java b/server/src/main/java/org/elasticsearch/common/unit/RatioValue.java index 490515572f42c..7ae664d7ce26a 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/RatioValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/RatioValue.java @@ -9,11 +9,16 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; /** * Utility class to represent ratio and percentage values between 0 and 100 */ -public class RatioValue { +public class RatioValue implements Writeable { private final double percent; public RatioValue(double percent) { @@ -81,4 +86,13 @@ public String formatNoTrailingZerosPercent() { return value.substring(0, Math.min(i + 1, value.length())) + "%"; } } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeDouble(percent); + } + + public static RatioValue readFrom(StreamInput in) throws IOException { + return new RatioValue(in.readDouble()); + } } diff --git a/server/src/main/java/org/elasticsearch/common/unit/RelativeByteSizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/RelativeByteSizeValue.java index f8b63c4ec0895..f02bb073aeb8a 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/RelativeByteSizeValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/RelativeByteSizeValue.java @@ -9,6 +9,11 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; /** * A byte size value that allows specification using either of: @@ -16,7 +21,7 @@ * 2. Relative percentage value (95%) * 3. 
Relative ratio value (0.95) */ -public class RelativeByteSizeValue { +public class RelativeByteSizeValue implements Writeable { private final ByteSizeValue absolute; private final RatioValue ratio; @@ -103,4 +108,24 @@ public String getStringRep() { return absolute.getStringRep(); } } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(isAbsolute()); + if (isAbsolute()) { + assert absolute != null; + absolute.writeTo(out); + } else { + assert ratio != null; + ratio.writeTo(out); + } + } + + public static RelativeByteSizeValue readFrom(StreamInput in) throws IOException { + if (in.readBoolean()) { + return new RelativeByteSizeValue(ByteSizeValue.readFrom(in)); + } else { + return new RelativeByteSizeValue(RatioValue.readFrom(in)); + } + } } diff --git a/server/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java b/server/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java index b721d8eb309cc..224aefb21a82b 100644 --- a/server/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java +++ b/server/src/main/java/org/elasticsearch/monitor/fs/FsInfo.java @@ -8,12 +8,15 @@ package org.elasticsearch.monitor.fs; +import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -36,6 +39,11 @@ public static class Path implements Writeable, ToXContentObject { long free = -1; long available = -1; + ByteSizeValue lowWatermarkFreeSpace = null; + ByteSizeValue highWatermarkFreeSpace = null; + ByteSizeValue floodStageWatermarkFreeSpace = null; + ByteSizeValue frozenFloodStageWatermarkFreeSpace = null; + public Path() {} public Path(String path, String mount, long total, long free, long available) { @@ -92,6 +100,35 @@ public ByteSizeValue getAvailable() { return ByteSizeValue.ofBytes(available); } + public void setEffectiveWatermarks(final DiskThresholdSettings masterThresholdSettings, boolean isDedicatedFrozenNode) { + lowWatermarkFreeSpace = masterThresholdSettings.getFreeBytesThresholdLowStage(new ByteSizeValue(total, ByteSizeUnit.BYTES)); + highWatermarkFreeSpace = masterThresholdSettings.getFreeBytesThresholdHighStage(new ByteSizeValue(total, ByteSizeUnit.BYTES)); + floodStageWatermarkFreeSpace = masterThresholdSettings.getFreeBytesThresholdFloodStage( + new ByteSizeValue(total, ByteSizeUnit.BYTES) + ); + if (isDedicatedFrozenNode) { + frozenFloodStageWatermarkFreeSpace = masterThresholdSettings.getFreeBytesThresholdFrozenFloodStage( + new ByteSizeValue(total, ByteSizeUnit.BYTES) + ); + } + } + + public ByteSizeValue getLowWatermarkFreeSpace() { + return lowWatermarkFreeSpace; + } + + public ByteSizeValue getHighWatermarkFreeSpace() { + return highWatermarkFreeSpace; + } + + public ByteSizeValue getFloodStageWatermarkFreeSpace() { + return floodStageWatermarkFreeSpace; + } + + public ByteSizeValue getFrozenFloodStageWatermarkFreeSpace() { + return frozenFloodStageWatermarkFreeSpace; + } + private static long addLong(long current, long other) { if (current == -1 && other == -1) { return 0; @@ 
-121,6 +158,14 @@ static final class Fields { static final String FREE_IN_BYTES = "free_in_bytes"; static final String AVAILABLE = "available"; static final String AVAILABLE_IN_BYTES = "available_in_bytes"; + static final String LOW_WATERMARK_FREE_SPACE = "low_watermark_free_space"; + static final String LOW_WATERMARK_FREE_SPACE_IN_BYTES = "low_watermark_free_space_in_bytes"; + static final String HIGH_WATERMARK_FREE_SPACE = "high_watermark_free_space"; + static final String HIGH_WATERMARK_FREE_SPACE_IN_BYTES = "high_watermark_free_space_in_bytes"; + static final String FLOOD_STAGE_FREE_SPACE = "flood_stage_free_space"; + static final String FLOOD_STAGE_FREE_SPACE_IN_BYTES = "flood_stage_free_space_in_bytes"; + static final String FROZEN_FLOOD_STAGE_FREE_SPACE = "frozen_flood_stage_free_space"; + static final String FROZEN_FLOOD_STAGE_FREE_SPACE_IN_BYTES = "frozen_flood_stage_free_space_in_bytes"; } @Override @@ -146,6 +191,35 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.humanReadableField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, getAvailable()); } + if (lowWatermarkFreeSpace != null) { + builder.humanReadableField( + Fields.LOW_WATERMARK_FREE_SPACE_IN_BYTES, + Fields.LOW_WATERMARK_FREE_SPACE, + getLowWatermarkFreeSpace() + ); + } + if (highWatermarkFreeSpace != null) { + builder.humanReadableField( + Fields.HIGH_WATERMARK_FREE_SPACE_IN_BYTES, + Fields.HIGH_WATERMARK_FREE_SPACE, + getHighWatermarkFreeSpace() + ); + } + if (floodStageWatermarkFreeSpace != null) { + builder.humanReadableField( + Fields.FLOOD_STAGE_FREE_SPACE_IN_BYTES, + Fields.FLOOD_STAGE_FREE_SPACE, + getFloodStageWatermarkFreeSpace() + ); + } + if (frozenFloodStageWatermarkFreeSpace != null) { + builder.humanReadableField( + Fields.FROZEN_FLOOD_STAGE_FREE_SPACE_IN_BYTES, + Fields.FROZEN_FLOOD_STAGE_FREE_SPACE, + getFrozenFloodStageWatermarkFreeSpace() + ); + } + builder.endObject(); return builder; } @@ -460,6 +534,19 @@ public FsInfo(StreamInput in) throws IOException { this.total = total(); } + public static FsInfo setEffectiveWatermarks( + @Nullable final FsInfo fsInfo, + @Nullable final DiskThresholdSettings masterThresholdSettings, + boolean isDedicatedFrozenNode + ) { + if (fsInfo != null && masterThresholdSettings != null) { + for (Path path : fsInfo.paths) { + path.setEffectiveWatermarks(masterThresholdSettings, isDedicatedFrozenNode); + } + } + return fsInfo; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(timestamp); diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 71b6aacd56ea7..cdb35cb9ac660 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -12,4 +12,5 @@ org.elasticsearch.cluster.service.TransportFeatures org.elasticsearch.cluster.metadata.MetadataFeatures org.elasticsearch.rest.RestFeatures org.elasticsearch.indices.IndicesFeatures +org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures org.elasticsearch.search.retriever.RetrieversFeatures diff --git a/server/src/test/java/org/elasticsearch/common/unit/RelativeByteSizeValueTests.java b/server/src/test/java/org/elasticsearch/common/unit/RelativeByteSizeValueTests.java index 3a978157e878b..3688fd033b19d 100644 --- 
a/server/src/test/java/org/elasticsearch/common/unit/RelativeByteSizeValueTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/RelativeByteSizeValueTests.java @@ -9,13 +9,42 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class RelativeByteSizeValueTests extends ESTestCase { + public void testDeserialization() throws IOException { + final var origin1 = new RelativeByteSizeValue(new ByteSizeValue(between(0, 2048), randomFrom(ByteSizeUnit.values()))); + final var origin2 = new RelativeByteSizeValue(new RatioValue(randomDoubleBetween(0.0, 100.0, true))); + final RelativeByteSizeValue target1, target2; + + try (var out = new BytesStreamOutput()) { + origin1.writeTo(out); + origin2.writeTo(out); + try (var in = out.bytes().streamInput()) { + target1 = RelativeByteSizeValue.readFrom(in); + target2 = RelativeByteSizeValue.readFrom(in); + } + } + + assertTrue(origin1.isAbsolute()); + assertTrue(target1.isAbsolute()); + assertNull(origin1.getRatio()); + assertNull(target1.getRatio()); + assertEquals(origin1.getAbsolute(), target1.getAbsolute()); + assertEquals(origin1.getAbsolute().getUnit(), target1.getAbsolute().getUnit()); + + assertFalse(origin2.isAbsolute()); + assertFalse(target2.isAbsolute()); + assertEquals(origin2.getRatio().getAsPercent(), target2.getRatio().getAsPercent(), 0.0); + } + public void testPercentage() { double value = randomIntBetween(0, 100); RelativeByteSizeValue parsed = RelativeByteSizeValue.parseRelativeByteSizeValue(value + "%", "test");
From f1bcb338ec79d00396f1e7a6154fe71eef4c80b2 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 18 Apr 2024 16:20:46 +0200 Subject: [PATCH 100/130] SQL: Limit how much space some string functions can use (#107333) This change checks whether certain functions would generate a result exceeding a fixed byte size, and fails the call if so. This prevents a single operation/query from taking down the entire VM.
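Each affected function now projects the size of its result before materializing it, and rejects only the offending query once the projection exceeds the limit. The REPLACE case is the least obvious, since the result size depends on how often the pattern matches; a sketch of its pre-check (the patch inlines this computation in ReplaceFunctionProcessor, the helper name here is illustrative):

    // Count the matches first, then size the result arithmetically, so an
    // oversized result is rejected before any large allocation happens.
    static long projectedLength(String input, String pattern, String replacement) {
        int patternLen = pattern.length();
        long matches = 0;
        for (int i = input.indexOf(pattern); i >= 0; i = input.indexOf(pattern, i + patternLen)) {
            matches++;
        }
        return input.length() + matches * (replacement.length() - patternLen);
    }
    // e.g. projectedLength("aXbXcX", "X", "1234") == 6 + 3 * (4 - 1) == 15,
    // compared against the 1 MB MAX_RESULT_LENGTH before building the string.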
--- docs/changelog/107333.yaml | 18 ++++++++++++++++++ docs/reference/sql/functions/string.asciidoc | 10 ++++++++++ .../string/BinaryStringNumericProcessor.java | 10 ++++------ .../string/ConcatFunctionProcessor.java | 7 ++++++- .../string/InsertFunctionProcessor.java | 6 +++++- .../string/ReplaceFunctionProcessor.java | 19 ++++++++++++++----- .../scalar/string/StringProcessor.java | 16 ++++++++++++++++ .../BinaryStringNumericProcessorTests.java | 15 +++++++++++++++ .../scalar/string/ConcatProcessorTests.java | 9 +++++++++ .../scalar/string/InsertProcessorTests.java | 18 ++++++++++++++++++ .../scalar/string/ReplaceProcessorTests.java | 13 +++++++++++++ .../string/StringFunctionProcessorTests.java | 13 +++++++++++++ 12 files changed, 141 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/107333.yaml diff --git a/docs/changelog/107333.yaml b/docs/changelog/107333.yaml new file mode 100644 index 0000000000000..0762e9a19795c --- /dev/null +++ b/docs/changelog/107333.yaml @@ -0,0 +1,18 @@ +pr: 107333 +summary: Limit how much space some string functions can use +area: SQL +type: breaking +issues: [] +breaking: + title: Limit how much space some string functions can use + area: REST API + details: "Before this change, some of the string functions could return a result\ + \ of any arbitrary length, which could force the VM to allocate large chunks of\ + \ memory or even make it exit. Any user with access to the SQL API can invoke\ + \ these functions. This change introduces a limitation of how much memory the\ + \ result returned by a function call can consume. The functions affected by this\ + \ change are: CONCAT, INSERT, REPEAT, REPLACE and SPACE." + impact: "The affected functions used to return a result of any length. After this\ + \ change, a result can no longer exceed 1MB in length. Note that this is a bytes\ + \ length, the character count may be lower." + notable: false diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc index 318b8d6996415..2535ccbe0556d 100644 --- a/docs/reference/sql/functions/string.asciidoc +++ b/docs/reference/sql/functions/string.asciidoc @@ -109,6 +109,8 @@ CONCAT( *Description*: Returns a character string that is the result of concatenating `string_exp1` to `string_exp2`. +The resulting string cannot exceed a byte length of 1 MB. + [source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringConcat] @@ -137,6 +139,8 @@ INSERT( *Description*: Returns a string where `length` characters have been deleted from `source`, beginning at `start`, and where `replacement` has been inserted into `source`, beginning at `start`. +The resulting string cannot exceed a byte length of 1 MB. + [source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringInsert] @@ -330,6 +334,8 @@ REPEAT( *Description*: Returns a character string composed of `string_exp` repeated `count` times. +The resulting string cannot exceed a byte length of 1 MB. + [source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringRepeat] @@ -356,6 +362,8 @@ REPLACE( *Description*: Search `source` for occurrences of `pattern`, and replace with `replacement`. +The resulting string cannot exceed a byte length of 1 MB. 
+ [source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringReplace] @@ -423,6 +431,8 @@ SPACE(count) <1> *Description*: Returns a character string consisting of `count` spaces. +The resulting string cannot exceed a byte length of 1 MB. + [source, sql] -------------------------------------------------- include-tagged::{sql-specs}/docs/docs.csv-spec[stringSpace] diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java index 434c5a66cd06e..6cab7c5d3f0b0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessor.java @@ -16,6 +16,8 @@ import java.io.IOException; import java.util.function.BiFunction; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.checkResultLength; + /** * Processor class covering string manipulating functions that have the first parameter as string, * second parameter as numeric and a string result. @@ -42,12 +44,8 @@ public enum BinaryStringNumericOperation implements BiFunction op) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java index 3d7e403468b7d..2eb36f35240d1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionProcessor.java @@ -16,6 +16,8 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.checkResultLength; + public class ConcatFunctionProcessor extends BinaryProcessor { public static final String NAME = "scon"; @@ -62,7 +64,10 @@ public static Object process(Object source1, Object source2) { throw new SqlIllegalArgumentException("A string/char is required; received [{}]", source2); } - return source1.toString().concat(source2.toString()); + String str1 = source1.toString(); + String str2 = source2.toString(); + checkResultLength(str1.length() + str2.length()); + return str1.concat(str2); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java index ebd3dec176258..98a06ea58ea72 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertFunctionProcessor.java @@ -15,6 +15,8 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.checkResultLength; + public class InsertFunctionProcessor implements Processor { private final Processor input, start, length, 
replacement; @@ -71,7 +73,9 @@ public static Object doProcess(Object input, Object start, Object length, Object StringBuilder sb = new StringBuilder(input.toString()); String replString = (replacement.toString()); - return sb.replace(realStart, realStart + ((Number) length).intValue(), replString).toString(); + int cutLength = ((Number) length).intValue(); + checkResultLength(sb.length() - cutLength + replString.length()); + return sb.replace(realStart, realStart + cutLength, replString).toString(); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java index 98c35036c272c..9c08dc17269e1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceFunctionProcessor.java @@ -58,11 +58,20 @@ public static Object doProcess(Object input, Object pattern, Object replacement) throw new SqlIllegalArgumentException("A string/char is required; received [{}]", replacement); } - return Strings.replace( - input instanceof Character ? input.toString() : (String) input, - pattern instanceof Character ? pattern.toString() : (String) pattern, - replacement instanceof Character ? replacement.toString() : (String) replacement - ); + String inputStr = input instanceof Character ? input.toString() : (String) input; + String patternStr = pattern instanceof Character ? pattern.toString() : (String) pattern; + String replacementStr = replacement instanceof Character ? replacement.toString() : (String) replacement; + checkResultLength(inputStr, patternStr, replacementStr); + return Strings.replace(inputStr, patternStr, replacementStr); + } + + private static void checkResultLength(String input, String pattern, String replacement) { + int patternLen = pattern.length(); + long matches = 0; + for (int i = input.indexOf(pattern); i >= 0; i = input.indexOf(pattern, i + patternLen)) { + matches++; + } + StringProcessor.checkResultLength(input.length() + matches * (replacement.length() - patternLen)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java index 4d922b6c10ea1..f90b625997216 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java @@ -17,8 +17,12 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.common.unit.ByteSizeUnit.MB; + public class StringProcessor implements Processor { + static final long MAX_RESULT_LENGTH = MB.toBytes(1); + private interface StringFunction { default R apply(Object o) { if ((o instanceof String || o instanceof Character) == false) { @@ -60,6 +64,7 @@ public enum StringOperation { if (i < 0) { return null; } + checkResultLength(n.longValue()); char[] spaces = new char[i]; char whitespace = ' '; Arrays.fill(spaces, whitespace); @@ -125,6 +130,17 @@ StringOperation processor() { return processor; } + static void checkResultLength(long needed) { + if (needed 
> MAX_RESULT_LENGTH) { + throw new SqlIllegalArgumentException( + "Required result length [{}] exceeds implementation limit [{}] bytes", + needed, + MAX_RESULT_LENGTH + ); + } + + } + @Override public boolean equals(Object obj) { if (obj == null || obj.getClass() != getClass()) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java index e4fa7dab1db9c..4758feef1e8b9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BinaryStringNumericProcessorTests.java @@ -18,6 +18,8 @@ import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringFunctionProcessorTests.maxResultLengthTest; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.MAX_RESULT_LENGTH; public class BinaryStringNumericProcessorTests extends AbstractWireSerializingTestCase { @@ -150,6 +152,10 @@ public void testRepeatFunctionWithEdgeCases() { assertNull(new Repeat(EMPTY, l("foo"), l(-1)).makePipe().asProcessor().process(null)); assertNull(new Repeat(EMPTY, l("foo"), l(0)).makePipe().asProcessor().process(null)); assertNull(new Repeat(EMPTY, l('f'), l(Integer.MIN_VALUE)).makePipe().asProcessor().process(null)); + assertEquals( + MAX_RESULT_LENGTH, + new Repeat(EMPTY, l('f'), l(MAX_RESULT_LENGTH)).makePipe().asProcessor().process(null).toString().length() + ); } public void testRepeatFunctionInputsValidation() { @@ -179,5 +185,14 @@ public void testRepeatFunctionInputsValidation() { e = expectThrows(InvalidArgumentException.class, () -> new Repeat(EMPTY, l("foo"), l(1.0)).makePipe().asProcessor().process(null)); assertEquals("A fixed point number is required for [count]; received [java.lang.Double]", e.getMessage()); + + maxResultLengthTest( + MAX_RESULT_LENGTH + 1, + () -> new Repeat(EMPTY, l("f"), l(MAX_RESULT_LENGTH + 1)).makePipe().asProcessor().process(null) + ); + + String str = "foo"; + long count = (MAX_RESULT_LENGTH / str.length()) + 1; + maxResultLengthTest(count * str.length(), () -> new Repeat(EMPTY, l(str), l(count)).makePipe().asProcessor().process(null)); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java index 3f5699dab7163..2662e9d7cd991 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatProcessorTests.java @@ -16,6 +16,8 @@ import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringFunctionProcessorTests.maxResultLengthTest; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.MAX_RESULT_LENGTH; 
public class ConcatProcessorTests extends AbstractWireSerializingTestCase { @@ -65,4 +67,11 @@ public void testConcatFunctionInputsValidation() { ); assertEquals("A string/char is required; received [3]", siae.getMessage()); } + + public void testMaxResultLength() { + String str = "a".repeat((int) MAX_RESULT_LENGTH - 1); + assertEquals(MAX_RESULT_LENGTH, new Concat(EMPTY, l(str), l("b")).makePipe().asProcessor().process(null).toString().length()); + + maxResultLengthTest(MAX_RESULT_LENGTH + 1, () -> new Concat(EMPTY, l(str), l("bb")).makePipe().asProcessor().process(null)); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java index 46beb99eb9a70..b93bb25fc6727 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/InsertProcessorTests.java @@ -17,6 +17,8 @@ import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringFunctionProcessorTests.maxResultLengthTest; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.MAX_RESULT_LENGTH; public class InsertProcessorTests extends AbstractWireSerializingTestCase { @@ -123,5 +125,21 @@ public void testInsertInputsValidation() { () -> new Insert(EMPTY, l("foobar"), l(1), l((long) Integer.MAX_VALUE + 1), l("bar")).makePipe().asProcessor().process(null) ); assertEquals("[length] out of the allowed range [0, 2147483647], received [2147483648]", e.getMessage()); + + String str = "a".repeat((int) MAX_RESULT_LENGTH); + String replaceWith = "bar"; + assertEquals( + MAX_RESULT_LENGTH, + new Insert(EMPTY, l(str), l(1), l(replaceWith.length()), l(replaceWith)).makePipe() + .asProcessor() + .process(null) + .toString() + .length() + ); + + maxResultLengthTest( + MAX_RESULT_LENGTH + 1, + () -> new Insert(EMPTY, l(str), l(1), l(replaceWith.length() - 1), l(replaceWith)).makePipe().asProcessor().process(null) + ); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java index 83f20cfd77ee0..237933a354ce8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ReplaceProcessorTests.java @@ -16,6 +16,8 @@ import static org.elasticsearch.xpack.ql.expression.function.scalar.FunctionTestUtils.l; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringFunctionProcessorTests.maxResultLengthTest; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.MAX_RESULT_LENGTH; public class ReplaceProcessorTests extends AbstractWireSerializingTestCase { @@ -72,5 +74,16 @@ public void testReplaceFunctionInputsValidation() { () -> new Replace(EMPTY, l("foobarbar"), l("bar"), 
l(3)).makePipe().asProcessor().process(null)
         );
         assertEquals("A string/char is required; received [3]", siae.getMessage());
+
+        String str = "b" + "a".repeat((int) MAX_RESULT_LENGTH - 2) + "b";
+        assertEquals(
+            MAX_RESULT_LENGTH,
+            new Replace(EMPTY, l(str), l("b"), l("c")).makePipe().asProcessor().process(null).toString().length()
+        );
+
+        maxResultLengthTest(
+            MAX_RESULT_LENGTH + 2,
+            () -> new Replace(EMPTY, l(str), l("b"), l("cc")).makePipe().asProcessor().process(null)
+        );
     }
 }
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java
index c1dc98571999f..1b3b103f575a0 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java
@@ -13,6 +13,8 @@
 
 import java.util.Locale;
 
+import static org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.MAX_RESULT_LENGTH;
+
 public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase {
     public static StringProcessor randomStringFunctionProcessor() {
         return new StringProcessor(randomFrom(StringOperation.values()));
@@ -196,9 +198,20 @@ public void testSpace() {
         assertEquals("", proc.process(0));
         assertNull(proc.process(-1));
 
+        assertEquals(MAX_RESULT_LENGTH, proc.process(MAX_RESULT_LENGTH).toString().length());
+        maxResultLengthTest(MAX_RESULT_LENGTH + 1, () -> proc.process(MAX_RESULT_LENGTH + 1));
+
         numericInputValidation(proc);
     }
 
+    static void maxResultLengthTest(long required, ThrowingRunnable runnable) {
+        Exception e = expectThrows(SqlIllegalArgumentException.class, runnable);
+        assertEquals(
+            "Required result length [" + required + "] exceeds implementation limit [" + MAX_RESULT_LENGTH + "] bytes",
+            e.getMessage()
+        );
+    }
+
     public void testBitLength() {
         StringProcessor proc = new StringProcessor(StringOperation.BIT_LENGTH);
         assertNull(proc.process(null));

From d7c55575cb52ea5e457fbc3b290dcda70918aec3 Mon Sep 17 00:00:00 2001
From: Bogdan Pintea
Date: Thu, 18 Apr 2024 16:37:31 +0200
Subject: [PATCH 101/130] ESQL: Introduce a casting operator, `::` (#107409)

This adds support for an "inline" casting operator, `::`. The operator
can be used with literals, fields and functions. The operator delegates
to one of the existing `to_xxx()` conversion functions, so only the types
for which such a converter exists can be cast to with the new operator.
For convenience, a list of type name aliases is introduced as well.
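Beyond the minimal example below, a few casts drawn from the
`convert.csv-spec` tests added by this patch, shown here for illustration
(each delegates to the corresponding `to_xxx()` conversion function):

    ROW ip="1.1.1.1"::ip, date="1985-01-01T00:00:00Z"::datetime
    ROW one=1::STRING::LONG::BOOL      // chained casts apply left to right
    FROM employees | WHERE ABS(birth_date::LONG) < 176169600000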
Example: `ROW 1::bool, "1"::int` --- docs/changelog/107409.yaml | 5 + .../src/main/resources/convert.csv-spec | 169 ++ .../esql/src/main/antlr/EsqlBaseLexer.g4 | 1 + .../esql/src/main/antlr/EsqlBaseLexer.tokens | 232 +-- .../esql/src/main/antlr/EsqlBaseParser.g4 | 5 + .../esql/src/main/antlr/EsqlBaseParser.tokens | 232 +-- .../xpack/esql/parser/EsqlBaseLexer.interp | 5 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1482 +++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 5 +- .../xpack/esql/parser/EsqlBaseParser.java | 1642 +++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 36 +- .../parser/EsqlBaseParserBaseVisitor.java | 20 +- .../esql/parser/EsqlBaseParserListener.java | 48 +- .../esql/parser/EsqlBaseParserVisitor.java | 26 +- .../xpack/esql/parser/ExpressionBuilder.java | 22 + .../xpack/esql/plugin/EsqlFeatures.java | 8 +- .../esql/type/EsqlDataTypeConverter.java | 55 + .../xpack/esql/type/EsqlDataTypes.java | 15 + .../xpack/esql/analysis/VerifierTests.java | 4 + .../esql/parser/StatementParserTests.java | 12 + 20 files changed, 2275 insertions(+), 1749 deletions(-) create mode 100644 docs/changelog/107409.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec diff --git a/docs/changelog/107409.yaml b/docs/changelog/107409.yaml new file mode 100644 index 0000000000000..6f2350239772f --- /dev/null +++ b/docs/changelog/107409.yaml @@ -0,0 +1,5 @@ +pr: 107409 +summary: "ESQL: Introduce a casting operator, `::`" +area: ES|QL +type: feature +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec new file mode 100644 index 0000000000000..dd495f1f9bd12 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec @@ -0,0 +1,169 @@ +// Conversion-specific tests + +convertToBoolean +required_feature: esql.casting_operator +ROW zero=0::boolean, one=1::bool +; + + zero:boolean | one:boolean +false |true +; + +convertToInteger +required_feature: esql.casting_operator +ROW zero="0"::integer, one="1"::int +; + + zero:integer | one:integer +0 |1 +; + +convertToIP +required_feature: esql.casting_operator +ROW ip="1.1.1.1"::ip +; + + ip:ip +1.1.1.1 +; + +convertToLong +required_feature: esql.casting_operator +ROW long="-1"::long +; + +long:long +-1 +; + +convertToLongWithWarning +required_feature: esql.casting_operator +ROW long="1.1.1.1"::long +; +warning:Line 1:10: evaluation of [\"1.1.1.1\"::long] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:10: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [1.1.1.1] + +long:long +null +; + +convertToDouble +required_feature: esql.casting_operator +ROW zero="0"::double +; + + zero:double +0.0 +; + +convertToString +required_feature: esql.casting_operator +ROW one=1::keyword, two=2::text, three=3::string +; + + one:keyword | two:keyword | three:keyword +1 |2 |3 +; + +convertToDatetime +required_feature: esql.casting_operator +ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime +; + + date:datetime | zero:datetime +1985-01-01T00:00:00.000Z|1970-01-01T00:00:00.000Z +; + +convertToVersion +required_feature: esql.casting_operator +ROW ver="1.2.3"::version +; + + ver:version +1.2.3 +; + +convertToUnsignedLong +required_feature: esql.casting_operator +ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng +; + + zero:unsigned_long | two:unsigned_long +0 |2 +; + +convertToGeoPoint +required_feature: esql.casting_operator +ROW gp="POINT(0 0)"::geo_point +; + + gp:geo_point +POINT (0.0 0.0) +; + +convertToGeoShape +required_feature: esql.casting_operator +ROW gs="POINT(0 0)"::geo_shape +; + + gs:geo_shape +POINT (0.0 0.0) +; + +convertToCartesianPoint +required_feature: esql.casting_operator +ROW cp="POINT(0 0)"::cartesian_point +; + + cp:cartesian_point +POINT (0.0 0.0) +; + +convertToCartesianShape +required_feature: esql.casting_operator +ROW cs="POINT(0 0)"::cartesian_shape +; + + cs:cartesian_shape +POINT (0.0 0.0) +; + +convertChained +required_feature: esql.casting_operator +ROW one=1::STRING::LONG::BOOL +; + +one:boolean +true +; + +convertWithIndexMultipleConversionsInSameExpressionAndConversionInFiltering +required_feature: esql.casting_operator + FROM employees +| EVAL en_str=emp_no::STRING, bd=ABS(birth_date::LONG)::STRING +| KEEP en_str, emp_no, bd, birth_date +| WHERE ABS(birth_date::LONG) < 176169600000 +| SORT emp_no +; + + en_str:keyword| emp_no:integer| bd:keyword | birth_date:datetime +10092 |10092 |164246400000 |1964-10-18T00:00:00.000Z +10093 |10093 |175392000000 |1964-06-11T00:00:00.000Z +10095 |10095 |157593600000 |1965-01-03T00:00:00.000Z +; + +convertWithBoolExpressionAndQualifiedName +required_feature: esql.casting_operator + FROM employees +| EVAL neg = (NOT still_hired)::string, sf = ROUND(height.scaled_float::double, 2) +| KEEP emp_no, still_hired, neg, sf +| SORT emp_no +| WHERE neg == "false" +| LIMIT 3 +; + + emp_no:integer| still_hired:boolean | neg:keyword | sf:double +10001 |true |false |2.03 +10002 |true |false |2.08 +10004 |true |false |1.78 +; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 9bc3d695ee9fa..c4a3dc7c56615 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -110,6 +110,7 @@ BY : 'by'; AND : 'and'; ASC : 'asc'; ASSIGN : '='; +CAST_OP : '::'; COMMA : ','; DESC : 'desc'; DOT : '.'; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index fc02831fc219f..b496aa68b61f7 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -31,82 +31,83 @@ BY=30 AND=31 ASC=32 ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 
-PERCENT=63 -OPENING_BRACKET=64 -CLOSING_BRACKET=65 -UNQUOTED_IDENTIFIER=66 -QUOTED_IDENTIFIER=67 -EXPR_LINE_COMMENT=68 -EXPR_MULTILINE_COMMENT=69 -EXPR_WS=70 -OPTIONS=71 -METADATA=72 -FROM_UNQUOTED_IDENTIFIER=73 -FROM_LINE_COMMENT=74 -FROM_MULTILINE_COMMENT=75 -FROM_WS=76 -ID_PATTERN=77 -PROJECT_LINE_COMMENT=78 -PROJECT_MULTILINE_COMMENT=79 -PROJECT_WS=80 -AS=81 -RENAME_LINE_COMMENT=82 -RENAME_MULTILINE_COMMENT=83 -RENAME_WS=84 -ON=85 -WITH=86 -ENRICH_POLICY_NAME=87 -ENRICH_LINE_COMMENT=88 -ENRICH_MULTILINE_COMMENT=89 -ENRICH_WS=90 -ENRICH_FIELD_LINE_COMMENT=91 -ENRICH_FIELD_MULTILINE_COMMENT=92 -ENRICH_FIELD_WS=93 -MVEXPAND_LINE_COMMENT=94 -MVEXPAND_MULTILINE_COMMENT=95 -MVEXPAND_WS=96 -INFO=97 -SHOW_LINE_COMMENT=98 -SHOW_MULTILINE_COMMENT=99 -SHOW_WS=100 -FUNCTIONS=101 -META_LINE_COMMENT=102 -META_MULTILINE_COMMENT=103 -META_WS=104 -COLON=105 -SETTING=106 -SETTING_LINE_COMMENT=107 -SETTTING_MULTILINE_COMMENT=108 -SETTING_WS=109 +CAST_OP=34 +COMMA=35 +DESC=36 +DOT=37 +FALSE=38 +FIRST=39 +LAST=40 +LP=41 +IN=42 +IS=43 +LIKE=44 +NOT=45 +NULL=46 +NULLS=47 +OR=48 +PARAM=49 +RLIKE=50 +RP=51 +TRUE=52 +EQ=53 +CIEQ=54 +NEQ=55 +LT=56 +LTE=57 +GT=58 +GTE=59 +PLUS=60 +MINUS=61 +ASTERISK=62 +SLASH=63 +PERCENT=64 +OPENING_BRACKET=65 +CLOSING_BRACKET=66 +UNQUOTED_IDENTIFIER=67 +QUOTED_IDENTIFIER=68 +EXPR_LINE_COMMENT=69 +EXPR_MULTILINE_COMMENT=70 +EXPR_WS=71 +OPTIONS=72 +METADATA=73 +FROM_UNQUOTED_IDENTIFIER=74 +FROM_LINE_COMMENT=75 +FROM_MULTILINE_COMMENT=76 +FROM_WS=77 +ID_PATTERN=78 +PROJECT_LINE_COMMENT=79 +PROJECT_MULTILINE_COMMENT=80 +PROJECT_WS=81 +AS=82 +RENAME_LINE_COMMENT=83 +RENAME_MULTILINE_COMMENT=84 +RENAME_WS=85 +ON=86 +WITH=87 +ENRICH_POLICY_NAME=88 +ENRICH_LINE_COMMENT=89 +ENRICH_MULTILINE_COMMENT=90 +ENRICH_WS=91 +ENRICH_FIELD_LINE_COMMENT=92 +ENRICH_FIELD_MULTILINE_COMMENT=93 +ENRICH_FIELD_WS=94 +MVEXPAND_LINE_COMMENT=95 +MVEXPAND_MULTILINE_COMMENT=96 +MVEXPAND_WS=97 +INFO=98 +SHOW_LINE_COMMENT=99 +SHOW_MULTILINE_COMMENT=100 +SHOW_WS=101 +FUNCTIONS=102 +META_LINE_COMMENT=103 +META_MULTILINE_COMMENT=104 +META_WS=105 +COLON=106 +SETTING=107 +SETTING_LINE_COMMENT=108 +SETTTING_MULTILINE_COMMENT=109 +SETTING_WS=110 'dissect'=1 'drop'=2 'enrich'=3 @@ -130,42 +131,43 @@ SETTING_WS=109 'and'=31 'asc'=32 '='=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=65 -'options'=71 -'metadata'=72 -'as'=81 -'on'=85 -'with'=86 -'info'=97 -'functions'=101 -':'=105 +'::'=34 +','=35 +'desc'=36 +'.'=37 +'false'=38 +'first'=39 +'last'=40 +'('=41 +'in'=42 +'is'=43 +'like'=44 +'not'=45 +'null'=46 +'nulls'=47 +'or'=48 +'?'=49 +'rlike'=50 +')'=51 +'true'=52 +'=='=53 +'=~'=54 +'!='=55 +'<'=56 +'<='=57 +'>'=58 +'>='=59 +'+'=60 +'-'=61 +'*'=62 +'/'=63 +'%'=64 +']'=66 +'options'=72 +'metadata'=73 +'as'=82 +'on'=86 +'with'=87 +'info'=98 +'functions'=102 +':'=106 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 9f700f3905111..62dcc6ebd484b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -79,12 +79,17 @@ primaryExpression | qualifiedName #dereference | functionExpression #function | LP booleanExpression RP #parenthesizedExpression + | primaryExpression CAST_OP dataType #inlineCast ; functionExpression : identifier LP (ASTERISK | 
(booleanExpression (COMMA booleanExpression)*))? RP ; +dataType + : identifier #toDataType + ; + rowCommand : ROW fields ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index fc02831fc219f..b496aa68b61f7 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -31,82 +31,83 @@ BY=30 AND=31 ASC=32 ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -OPENING_BRACKET=64 -CLOSING_BRACKET=65 -UNQUOTED_IDENTIFIER=66 -QUOTED_IDENTIFIER=67 -EXPR_LINE_COMMENT=68 -EXPR_MULTILINE_COMMENT=69 -EXPR_WS=70 -OPTIONS=71 -METADATA=72 -FROM_UNQUOTED_IDENTIFIER=73 -FROM_LINE_COMMENT=74 -FROM_MULTILINE_COMMENT=75 -FROM_WS=76 -ID_PATTERN=77 -PROJECT_LINE_COMMENT=78 -PROJECT_MULTILINE_COMMENT=79 -PROJECT_WS=80 -AS=81 -RENAME_LINE_COMMENT=82 -RENAME_MULTILINE_COMMENT=83 -RENAME_WS=84 -ON=85 -WITH=86 -ENRICH_POLICY_NAME=87 -ENRICH_LINE_COMMENT=88 -ENRICH_MULTILINE_COMMENT=89 -ENRICH_WS=90 -ENRICH_FIELD_LINE_COMMENT=91 -ENRICH_FIELD_MULTILINE_COMMENT=92 -ENRICH_FIELD_WS=93 -MVEXPAND_LINE_COMMENT=94 -MVEXPAND_MULTILINE_COMMENT=95 -MVEXPAND_WS=96 -INFO=97 -SHOW_LINE_COMMENT=98 -SHOW_MULTILINE_COMMENT=99 -SHOW_WS=100 -FUNCTIONS=101 -META_LINE_COMMENT=102 -META_MULTILINE_COMMENT=103 -META_WS=104 -COLON=105 -SETTING=106 -SETTING_LINE_COMMENT=107 -SETTTING_MULTILINE_COMMENT=108 -SETTING_WS=109 +CAST_OP=34 +COMMA=35 +DESC=36 +DOT=37 +FALSE=38 +FIRST=39 +LAST=40 +LP=41 +IN=42 +IS=43 +LIKE=44 +NOT=45 +NULL=46 +NULLS=47 +OR=48 +PARAM=49 +RLIKE=50 +RP=51 +TRUE=52 +EQ=53 +CIEQ=54 +NEQ=55 +LT=56 +LTE=57 +GT=58 +GTE=59 +PLUS=60 +MINUS=61 +ASTERISK=62 +SLASH=63 +PERCENT=64 +OPENING_BRACKET=65 +CLOSING_BRACKET=66 +UNQUOTED_IDENTIFIER=67 +QUOTED_IDENTIFIER=68 +EXPR_LINE_COMMENT=69 +EXPR_MULTILINE_COMMENT=70 +EXPR_WS=71 +OPTIONS=72 +METADATA=73 +FROM_UNQUOTED_IDENTIFIER=74 +FROM_LINE_COMMENT=75 +FROM_MULTILINE_COMMENT=76 +FROM_WS=77 +ID_PATTERN=78 +PROJECT_LINE_COMMENT=79 +PROJECT_MULTILINE_COMMENT=80 +PROJECT_WS=81 +AS=82 +RENAME_LINE_COMMENT=83 +RENAME_MULTILINE_COMMENT=84 +RENAME_WS=85 +ON=86 +WITH=87 +ENRICH_POLICY_NAME=88 +ENRICH_LINE_COMMENT=89 +ENRICH_MULTILINE_COMMENT=90 +ENRICH_WS=91 +ENRICH_FIELD_LINE_COMMENT=92 +ENRICH_FIELD_MULTILINE_COMMENT=93 +ENRICH_FIELD_WS=94 +MVEXPAND_LINE_COMMENT=95 +MVEXPAND_MULTILINE_COMMENT=96 +MVEXPAND_WS=97 +INFO=98 +SHOW_LINE_COMMENT=99 +SHOW_MULTILINE_COMMENT=100 +SHOW_WS=101 +FUNCTIONS=102 +META_LINE_COMMENT=103 +META_MULTILINE_COMMENT=104 +META_WS=105 +COLON=106 +SETTING=107 +SETTING_LINE_COMMENT=108 +SETTTING_MULTILINE_COMMENT=109 +SETTING_WS=110 'dissect'=1 'drop'=2 'enrich'=3 @@ -130,42 +131,43 @@ SETTING_WS=109 'and'=31 'asc'=32 '='=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=65 -'options'=71 -'metadata'=72 -'as'=81 -'on'=85 -'with'=86 -'info'=97 -'functions'=101 -':'=105 +'::'=34 +','=35 +'desc'=36 +'.'=37 +'false'=38 +'first'=39 +'last'=40 +'('=41 +'in'=42 +'is'=43 +'like'=44 +'not'=45 +'null'=46 +'nulls'=47 +'or'=48 +'?'=49 +'rlike'=50 +')'=51 +'true'=52 +'=='=53 
+'=~'=54 +'!='=55 +'<'=56 +'<='=57 +'>'=58 +'>='=59 +'+'=60 +'-'=61 +'*'=62 +'/'=63 +'%'=64 +']'=66 +'options'=72 +'metadata'=73 +'as'=82 +'on'=86 +'with'=87 +'info'=98 +'functions'=102 +':'=106 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index b77d229cb5b9e..866093ef55a6c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -33,6 +33,7 @@ null 'and' 'asc' '=' +'::' ',' 'desc' '.' @@ -145,6 +146,7 @@ BY AND ASC ASSIGN +CAST_OP COMMA DESC DOT @@ -268,6 +270,7 @@ BY AND ASC ASSIGN +CAST_OP COMMA DESC DOT @@ -402,4 +405,4 @@ META_MODE SETTING_MODE atn: -[4, 0, 109, 1198, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 
1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 510, 8, 20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 793, 8, 77, 10, 77, 12, 77, 796, 9, 77, 1, 77, 1, 77, 3, 77, 800, 8, 77, 1, 77, 4, 77, 803, 8, 77, 11, 77, 12, 77, 804, 3, 77, 807, 8, 77, 1, 78, 1, 78, 4, 78, 811, 8, 78, 11, 78, 12, 78, 812, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 
1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 3, 91, 876, 8, 91, 1, 92, 4, 92, 879, 8, 92, 11, 92, 12, 92, 880, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 916, 8, 100, 1, 101, 1, 101, 3, 101, 920, 8, 101, 1, 101, 5, 101, 923, 8, 101, 10, 101, 12, 101, 926, 9, 101, 1, 101, 1, 101, 3, 101, 930, 8, 101, 1, 101, 4, 101, 933, 8, 101, 11, 101, 12, 101, 934, 3, 101, 937, 8, 101, 1, 102, 1, 102, 4, 102, 941, 8, 102, 11, 102, 12, 102, 942, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1018, 8, 120, 11, 120, 12, 120, 1019, 1, 120, 1, 120, 3, 120, 1024, 8, 120, 1, 120, 4, 120, 1027, 8, 120, 11, 120, 12, 120, 1028, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1183, 8, 155, 11, 155, 12, 155, 1184, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 0, 170, 67, 172, 68, 174, 69, 176, 70, 178, 0, 180, 0, 
182, 0, 184, 0, 186, 0, 188, 0, 190, 71, 192, 72, 194, 0, 196, 73, 198, 0, 200, 74, 202, 75, 204, 76, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 77, 218, 78, 220, 79, 222, 80, 224, 0, 226, 0, 228, 0, 230, 0, 232, 81, 234, 0, 236, 82, 238, 83, 240, 84, 242, 0, 244, 0, 246, 85, 248, 86, 250, 0, 252, 87, 254, 0, 256, 0, 258, 88, 260, 89, 262, 90, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 91, 280, 92, 282, 93, 284, 0, 286, 0, 288, 0, 290, 0, 292, 94, 294, 95, 296, 96, 298, 0, 300, 97, 302, 98, 304, 99, 306, 100, 308, 0, 310, 101, 312, 102, 314, 103, 316, 104, 318, 0, 320, 105, 322, 106, 324, 107, 326, 108, 328, 109, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1225, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 
0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 681, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 688, 1, 0, 0, 0, 110, 694, 1, 0, 0, 0, 112, 700, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 707, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 722, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 733, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 738, 1, 0, 0, 0, 134, 744, 1, 0, 0, 0, 136, 746, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 762, 1, 0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 767, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 772, 1, 0, 0, 0, 156, 774, 1, 0, 0, 0, 158, 776, 1, 0, 0, 0, 160, 778, 1, 0, 0, 0, 162, 780, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 806, 1, 0, 0, 0, 168, 808, 1, 0, 0, 0, 170, 816, 1, 0, 0, 0, 172, 818, 1, 0, 0, 0, 174, 822, 1, 0, 0, 0, 176, 826, 1, 0, 0, 0, 178, 830, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 839, 1, 0, 0, 0, 184, 843, 1, 0, 0, 0, 186, 847, 1, 0, 0, 0, 188, 851, 1, 0, 0, 0, 190, 855, 1, 0, 0, 0, 192, 863, 1, 0, 0, 0, 194, 875, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 882, 1, 0, 0, 0, 200, 886, 1, 0, 0, 0, 202, 890, 1, 0, 0, 0, 204, 894, 1, 0, 0, 0, 206, 898, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 907, 1, 0, 0, 0, 212, 915, 1, 0, 0, 0, 214, 936, 1, 0, 0, 0, 216, 940, 1, 0, 0, 0, 218, 944, 1, 0, 0, 0, 220, 948, 1, 0, 0, 0, 222, 952, 1, 0, 0, 0, 224, 956, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 965, 1, 0, 0, 0, 230, 969, 1, 0, 0, 0, 232, 973, 1, 0, 0, 0, 234, 976, 1, 0, 0, 0, 236, 980, 1, 0, 0, 0, 238, 984, 1, 0, 0, 0, 240, 988, 1, 0, 0, 0, 242, 992, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1014, 1, 0, 0, 0, 252, 1023, 1, 0, 0, 0, 254, 1030, 1, 0, 0, 0, 256, 1034, 1, 0, 0, 0, 258, 1038, 1, 0, 0, 0, 260, 1042, 1, 0, 0, 0, 262, 1046, 1, 0, 0, 0, 264, 1050, 1, 0, 0, 0, 266, 1056, 1, 0, 0, 0, 268, 1060, 1, 0, 0, 0, 270, 1064, 1, 0, 0, 0, 272, 1068, 1, 0, 0, 0, 274, 1072, 1, 0, 0, 0, 276, 1076, 1, 0, 0, 0, 278, 1080, 1, 0, 0, 0, 280, 1084, 1, 0, 0, 0, 282, 1088, 1, 0, 0, 0, 284, 1092, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1101, 1, 0, 0, 0, 290, 1105, 1, 0, 0, 0, 
292, 1109, 1, 0, 0, 0, 294, 1113, 1, 0, 0, 0, 296, 1117, 1, 0, 0, 0, 298, 1121, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1135, 1, 0, 0, 0, 306, 1139, 1, 0, 0, 0, 308, 1143, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1158, 1, 0, 0, 0, 314, 1162, 1, 0, 0, 0, 316, 1166, 1, 0, 0, 0, 318, 1170, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1182, 1, 0, 0, 0, 324, 1186, 1, 0, 0, 0, 326, 1190, 1, 0, 0, 0, 328, 1194, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 342, 343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 5, 112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 
115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 162, 75, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 
610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 106, 47, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 106, 47, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 106, 47, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 106, 47, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 44, 0, 0, 680, 103, 1, 0, 0, 0, 681, 682, 5, 100, 0, 0, 682, 683, 5, 101, 0, 0, 683, 684, 5, 115, 0, 0, 684, 685, 5, 99, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 46, 0, 0, 687, 107, 1, 0, 0, 0, 688, 689, 5, 102, 0, 0, 689, 690, 5, 97, 0, 0, 690, 691, 5, 108, 0, 0, 691, 692, 5, 115, 0, 0, 692, 693, 5, 101, 0, 0, 693, 109, 1, 0, 0, 0, 694, 695, 5, 102, 0, 0, 695, 696, 5, 105, 0, 0, 696, 697, 5, 114, 0, 0, 697, 698, 5, 115, 0, 0, 698, 699, 5, 116, 0, 0, 699, 111, 1, 0, 0, 0, 700, 701, 5, 108, 0, 0, 701, 702, 5, 97, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 40, 0, 0, 706, 115, 1, 0, 0, 0, 707, 708, 5, 105, 0, 0, 708, 709, 5, 110, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 115, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 108, 0, 0, 714, 715, 5, 105, 0, 0, 715, 716, 5, 107, 0, 0, 716, 717, 5, 101, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 110, 0, 0, 719, 720, 5, 111, 0, 0, 720, 721, 5, 116, 0, 0, 721, 123, 1, 0, 0, 0, 722, 723, 5, 110, 0, 0, 723, 724, 5, 117, 0, 0, 724, 725, 5, 108, 0, 0, 725, 726, 5, 108, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 732, 5, 115, 0, 0, 732, 127, 1, 0, 0, 0, 733, 734, 5, 111, 0, 0, 734, 735, 5, 114, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 63, 0, 0, 737, 131, 1, 0, 0, 0, 738, 739, 5, 114, 0, 0, 739, 740, 5, 108, 0, 0, 740, 741, 5, 105, 0, 0, 741, 742, 5, 107, 0, 0, 742, 743, 5, 101, 0, 0, 743, 133, 1, 0, 0, 0, 744, 745, 5, 41, 0, 0, 745, 135, 1, 0, 0, 0, 746, 747, 5, 116, 0, 0, 747, 748, 5, 114, 0, 0, 748, 749, 5, 117, 0, 0, 749, 750, 5, 
101, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 753, 5, 61, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 126, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 33, 0, 0, 758, 759, 5, 61, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 60, 0, 0, 761, 145, 1, 0, 0, 0, 762, 763, 5, 60, 0, 0, 763, 764, 5, 61, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 62, 0, 0, 766, 149, 1, 0, 0, 0, 767, 768, 5, 62, 0, 0, 768, 769, 5, 61, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 43, 0, 0, 771, 153, 1, 0, 0, 0, 772, 773, 5, 45, 0, 0, 773, 155, 1, 0, 0, 0, 774, 775, 5, 42, 0, 0, 775, 157, 1, 0, 0, 0, 776, 777, 5, 47, 0, 0, 777, 159, 1, 0, 0, 0, 778, 779, 5, 37, 0, 0, 779, 161, 1, 0, 0, 0, 780, 781, 5, 91, 0, 0, 781, 782, 1, 0, 0, 0, 782, 783, 6, 75, 0, 0, 783, 784, 6, 75, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 93, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 13, 0, 788, 789, 6, 76, 13, 0, 789, 165, 1, 0, 0, 0, 790, 794, 3, 70, 29, 0, 791, 793, 3, 86, 37, 0, 792, 791, 1, 0, 0, 0, 793, 796, 1, 0, 0, 0, 794, 792, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 807, 1, 0, 0, 0, 796, 794, 1, 0, 0, 0, 797, 800, 3, 84, 36, 0, 798, 800, 3, 78, 33, 0, 799, 797, 1, 0, 0, 0, 799, 798, 1, 0, 0, 0, 800, 802, 1, 0, 0, 0, 801, 803, 3, 86, 37, 0, 802, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 802, 1, 0, 0, 0, 804, 805, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 790, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 807, 167, 1, 0, 0, 0, 808, 810, 3, 80, 34, 0, 809, 811, 3, 82, 35, 0, 810, 809, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 80, 34, 0, 815, 169, 1, 0, 0, 0, 816, 817, 3, 168, 78, 0, 817, 171, 1, 0, 0, 0, 818, 819, 3, 50, 19, 0, 819, 820, 1, 0, 0, 0, 820, 821, 6, 80, 9, 0, 821, 173, 1, 0, 0, 0, 822, 823, 3, 52, 20, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 81, 9, 0, 825, 175, 1, 0, 0, 0, 826, 827, 3, 54, 21, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 82, 9, 0, 829, 177, 1, 0, 0, 0, 830, 831, 3, 66, 27, 0, 831, 832, 1, 0, 0, 0, 832, 833, 6, 83, 12, 0, 833, 834, 6, 83, 13, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 162, 75, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 10, 0, 838, 181, 1, 0, 0, 0, 839, 840, 3, 164, 76, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 85, 14, 0, 842, 183, 1, 0, 0, 0, 843, 844, 3, 102, 45, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 86, 15, 0, 846, 185, 1, 0, 0, 0, 847, 848, 3, 100, 44, 0, 848, 849, 1, 0, 0, 0, 849, 850, 6, 87, 16, 0, 850, 187, 1, 0, 0, 0, 851, 852, 3, 88, 38, 0, 852, 853, 1, 0, 0, 0, 853, 854, 6, 88, 17, 0, 854, 189, 1, 0, 0, 0, 855, 856, 5, 111, 0, 0, 856, 857, 5, 112, 0, 0, 857, 858, 5, 116, 0, 0, 858, 859, 5, 105, 0, 0, 859, 860, 5, 111, 0, 0, 860, 861, 5, 110, 0, 0, 861, 862, 5, 115, 0, 0, 862, 191, 1, 0, 0, 0, 863, 864, 5, 109, 0, 0, 864, 865, 5, 101, 0, 0, 865, 866, 5, 116, 0, 0, 866, 867, 5, 97, 0, 0, 867, 868, 5, 100, 0, 0, 868, 869, 5, 97, 0, 0, 869, 870, 5, 116, 0, 0, 870, 871, 5, 97, 0, 0, 871, 193, 1, 0, 0, 0, 872, 876, 8, 10, 0, 0, 873, 874, 5, 47, 0, 0, 874, 876, 8, 11, 0, 0, 875, 872, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 876, 195, 1, 0, 0, 0, 877, 879, 3, 194, 91, 0, 878, 877, 1, 0, 0, 0, 879, 880, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 883, 3, 170, 79, 0, 883, 884, 1, 0, 0, 0, 884, 885, 6, 93, 18, 0, 885, 199, 1, 0, 0, 0, 886, 887, 3, 50, 19, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 94, 9, 0, 889, 201, 1, 0, 0, 0, 890, 891, 3, 52, 20, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 95, 9, 0, 893, 203, 1, 0, 0, 0, 894, 895, 3, 54, 21, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 96, 9, 0, 897, 205, 1, 0, 0, 0, 
898, 899, 3, 66, 27, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 97, 12, 0, 901, 902, 6, 97, 13, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 106, 47, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 19, 0, 906, 209, 1, 0, 0, 0, 907, 908, 3, 102, 45, 0, 908, 909, 1, 0, 0, 0, 909, 910, 6, 99, 15, 0, 910, 211, 1, 0, 0, 0, 911, 916, 3, 70, 29, 0, 912, 916, 3, 68, 28, 0, 913, 916, 3, 84, 36, 0, 914, 916, 3, 156, 72, 0, 915, 911, 1, 0, 0, 0, 915, 912, 1, 0, 0, 0, 915, 913, 1, 0, 0, 0, 915, 914, 1, 0, 0, 0, 916, 213, 1, 0, 0, 0, 917, 920, 3, 70, 29, 0, 918, 920, 3, 156, 72, 0, 919, 917, 1, 0, 0, 0, 919, 918, 1, 0, 0, 0, 920, 924, 1, 0, 0, 0, 921, 923, 3, 212, 100, 0, 922, 921, 1, 0, 0, 0, 923, 926, 1, 0, 0, 0, 924, 922, 1, 0, 0, 0, 924, 925, 1, 0, 0, 0, 925, 937, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 927, 930, 3, 84, 36, 0, 928, 930, 3, 78, 33, 0, 929, 927, 1, 0, 0, 0, 929, 928, 1, 0, 0, 0, 930, 932, 1, 0, 0, 0, 931, 933, 3, 212, 100, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 932, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 919, 1, 0, 0, 0, 936, 929, 1, 0, 0, 0, 937, 215, 1, 0, 0, 0, 938, 941, 3, 214, 101, 0, 939, 941, 3, 168, 78, 0, 940, 938, 1, 0, 0, 0, 940, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 940, 1, 0, 0, 0, 942, 943, 1, 0, 0, 0, 943, 217, 1, 0, 0, 0, 944, 945, 3, 50, 19, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 103, 9, 0, 947, 219, 1, 0, 0, 0, 948, 949, 3, 52, 20, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 104, 9, 0, 951, 221, 1, 0, 0, 0, 952, 953, 3, 54, 21, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 105, 9, 0, 955, 223, 1, 0, 0, 0, 956, 957, 3, 66, 27, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 106, 12, 0, 959, 960, 6, 106, 13, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 100, 44, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 16, 0, 964, 227, 1, 0, 0, 0, 965, 966, 3, 102, 45, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 108, 15, 0, 968, 229, 1, 0, 0, 0, 969, 970, 3, 106, 47, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 109, 19, 0, 972, 231, 1, 0, 0, 0, 973, 974, 5, 97, 0, 0, 974, 975, 5, 115, 0, 0, 975, 233, 1, 0, 0, 0, 976, 977, 3, 216, 102, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 111, 20, 0, 979, 235, 1, 0, 0, 0, 980, 981, 3, 50, 19, 0, 981, 982, 1, 0, 0, 0, 982, 983, 6, 112, 9, 0, 983, 237, 1, 0, 0, 0, 984, 985, 3, 52, 20, 0, 985, 986, 1, 0, 0, 0, 986, 987, 6, 113, 9, 0, 987, 239, 1, 0, 0, 0, 988, 989, 3, 54, 21, 0, 989, 990, 1, 0, 0, 0, 990, 991, 6, 114, 9, 0, 991, 241, 1, 0, 0, 0, 992, 993, 3, 66, 27, 0, 993, 994, 1, 0, 0, 0, 994, 995, 6, 115, 12, 0, 995, 996, 6, 115, 13, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 162, 75, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 10, 0, 1000, 1001, 6, 116, 21, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 5, 111, 0, 0, 1003, 1004, 5, 110, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 117, 22, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 119, 0, 0, 1008, 1009, 5, 105, 0, 0, 1009, 1010, 5, 116, 0, 0, 1010, 1011, 5, 104, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 118, 22, 0, 1013, 249, 1, 0, 0, 0, 1014, 1015, 8, 12, 0, 0, 1015, 251, 1, 0, 0, 0, 1016, 1018, 3, 250, 119, 0, 1017, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1017, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 3, 320, 154, 0, 1022, 1024, 1, 0, 0, 0, 1023, 1017, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1026, 1, 0, 0, 0, 1025, 1027, 3, 250, 119, 0, 1026, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1026, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 253, 1, 0, 0, 0, 1030, 1031, 3, 170, 79, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 121, 18, 0, 1033, 255, 1, 0, 0, 0, 1034, 1035, 3, 252, 120, 0, 1035, 1036, 1, 
0, 0, 0, 1036, 1037, 6, 122, 23, 0, 1037, 257, 1, 0, 0, 0, 1038, 1039, 3, 50, 19, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 123, 9, 0, 1041, 259, 1, 0, 0, 0, 1042, 1043, 3, 52, 20, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 124, 9, 0, 1045, 261, 1, 0, 0, 0, 1046, 1047, 3, 54, 21, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 125, 9, 0, 1049, 263, 1, 0, 0, 0, 1050, 1051, 3, 66, 27, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 126, 12, 0, 1053, 1054, 6, 126, 13, 0, 1054, 1055, 6, 126, 13, 0, 1055, 265, 1, 0, 0, 0, 1056, 1057, 3, 100, 44, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 127, 16, 0, 1059, 267, 1, 0, 0, 0, 1060, 1061, 3, 102, 45, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 128, 15, 0, 1063, 269, 1, 0, 0, 0, 1064, 1065, 3, 106, 47, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 129, 19, 0, 1067, 271, 1, 0, 0, 0, 1068, 1069, 3, 248, 118, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 130, 24, 0, 1071, 273, 1, 0, 0, 0, 1072, 1073, 3, 216, 102, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 131, 20, 0, 1075, 275, 1, 0, 0, 0, 1076, 1077, 3, 170, 79, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 132, 18, 0, 1079, 277, 1, 0, 0, 0, 1080, 1081, 3, 50, 19, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 133, 9, 0, 1083, 279, 1, 0, 0, 0, 1084, 1085, 3, 52, 20, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 134, 9, 0, 1087, 281, 1, 0, 0, 0, 1088, 1089, 3, 54, 21, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 135, 9, 0, 1091, 283, 1, 0, 0, 0, 1092, 1093, 3, 66, 27, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 136, 12, 0, 1095, 1096, 6, 136, 13, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 106, 47, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 19, 0, 1100, 287, 1, 0, 0, 0, 1101, 1102, 3, 170, 79, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 138, 18, 0, 1104, 289, 1, 0, 0, 0, 1105, 1106, 3, 166, 77, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 139, 25, 0, 1108, 291, 1, 0, 0, 0, 1109, 1110, 3, 50, 19, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 140, 9, 0, 1112, 293, 1, 0, 0, 0, 1113, 1114, 3, 52, 20, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 141, 9, 0, 1116, 295, 1, 0, 0, 0, 1117, 1118, 3, 54, 21, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 142, 9, 0, 1120, 297, 1, 0, 0, 0, 1121, 1122, 3, 66, 27, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 143, 12, 0, 1124, 1125, 6, 143, 13, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 5, 105, 0, 0, 1127, 1128, 5, 110, 0, 0, 1128, 1129, 5, 102, 0, 0, 1129, 1130, 5, 111, 0, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 3, 50, 19, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 145, 9, 0, 1134, 303, 1, 0, 0, 0, 1135, 1136, 3, 52, 20, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 146, 9, 0, 1138, 305, 1, 0, 0, 0, 1139, 1140, 3, 54, 21, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 147, 9, 0, 1142, 307, 1, 0, 0, 0, 1143, 1144, 3, 66, 27, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 148, 12, 0, 1146, 1147, 6, 148, 13, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 5, 102, 0, 0, 1149, 1150, 5, 117, 0, 0, 1150, 1151, 5, 110, 0, 0, 1151, 1152, 5, 99, 0, 0, 1152, 1153, 5, 116, 0, 0, 1153, 1154, 5, 105, 0, 0, 1154, 1155, 5, 111, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 115, 0, 0, 1157, 311, 1, 0, 0, 0, 1158, 1159, 3, 50, 19, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 150, 9, 0, 1161, 313, 1, 0, 0, 0, 1162, 1163, 3, 52, 20, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 151, 9, 0, 1165, 315, 1, 0, 0, 0, 1166, 1167, 3, 54, 21, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 152, 9, 0, 1169, 317, 1, 0, 0, 0, 1170, 1171, 3, 164, 76, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 153, 14, 0, 1173, 1174, 6, 153, 13, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 5, 58, 0, 0, 1176, 321, 1, 0, 0, 0, 1177, 
1183, 3, 78, 33, 0, 1178, 1183, 3, 68, 28, 0, 1179, 1183, 3, 106, 47, 0, 1180, 1183, 3, 70, 29, 0, 1181, 1183, 3, 84, 36, 0, 1182, 1177, 1, 0, 0, 0, 1182, 1178, 1, 0, 0, 0, 1182, 1179, 1, 0, 0, 0, 1182, 1180, 1, 0, 0, 0, 1182, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1182, 1, 0, 0, 0, 1184, 1185, 1, 0, 0, 0, 1185, 323, 1, 0, 0, 0, 1186, 1187, 3, 50, 19, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 156, 9, 0, 1189, 325, 1, 0, 0, 0, 1190, 1191, 3, 52, 20, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 157, 9, 0, 1193, 327, 1, 0, 0, 0, 1194, 1195, 3, 54, 21, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 158, 9, 0, 1197, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 794, 799, 804, 806, 812, 875, 880, 915, 919, 924, 929, 934, 936, 940, 942, 1019, 1023, 1028, 1182, 1184, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 27, 0, 7, 67, 0, 7, 36, 0, 7, 77, 0, 5, 11, 0, 5, 7, 0, 7, 87, 0, 7, 86, 0, 7, 66, 0] \ No newline at end of file +[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 
154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 
78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 
96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 
240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 
256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 0, 0, 316, 1167, 1, 0, 0, 0, 318, 1171, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 411, 5, 108, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 
0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 0, 578, 580, 5, 96, 0, 0, 579, 576, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 
0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 655, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 
733, 734, 5, 117, 0, 0, 734, 735, 5, 108, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 
0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 978, 979, 5, 97, 0, 0, 979, 980, 5, 115, 0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 
0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 12, 0, 1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 0, 1136, 1137, 3, 50, 19, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 
5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 880, 885, 920, 924, 929, 934, 939, 941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 23beb8c26be5b..ac3354d0aa907 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -22,22 +22,22 @@ public class EsqlBaseLexer extends Lexer { STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, - LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, - PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, - GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, - CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, - EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, OPTIONS=71, METADATA=72, FROM_UNQUOTED_IDENTIFIER=73, - FROM_LINE_COMMENT=74, FROM_MULTILINE_COMMENT=75, FROM_WS=76, ID_PATTERN=77, - PROJECT_LINE_COMMENT=78, PROJECT_MULTILINE_COMMENT=79, PROJECT_WS=80, - AS=81, RENAME_LINE_COMMENT=82, RENAME_MULTILINE_COMMENT=83, RENAME_WS=84, - ON=85, WITH=86, ENRICH_POLICY_NAME=87, ENRICH_LINE_COMMENT=88, ENRICH_MULTILINE_COMMENT=89, - ENRICH_WS=90, ENRICH_FIELD_LINE_COMMENT=91, ENRICH_FIELD_MULTILINE_COMMENT=92, - ENRICH_FIELD_WS=93, MVEXPAND_LINE_COMMENT=94, MVEXPAND_MULTILINE_COMMENT=95, - MVEXPAND_WS=96, INFO=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, - SHOW_WS=100, FUNCTIONS=101, META_LINE_COMMENT=102, 
META_MULTILINE_COMMENT=103, - META_WS=104, COLON=105, SETTING=106, SETTING_LINE_COMMENT=107, SETTTING_MULTILINE_COMMENT=108, - SETTING_WS=109; + AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, + FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, + OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, + LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, + METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, + FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, + PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, + RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, + ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, + ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, + MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, + SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, + META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, + SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, @@ -61,19 +61,19 @@ private static String[] makeRuleNames() { "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", - "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", - "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", - "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", - "FROM_QUOTED_STRING", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", - "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", - "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", + "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", + "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", 
"FROM_QUOTED_IDENTIFIER", + "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", + "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", + "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", + "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", @@ -96,7 +96,7 @@ private static String[] makeLiteralNames() { "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", @@ -114,10 +114,10 @@ private static String[] makeSymbolicNames() { "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", - "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", + "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", + "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", @@ -191,7 +191,7 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000m\u04ae\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ @@ -234,716 +234,720 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ + "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ - 
"\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ - "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ - "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ - "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ - "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ - 
"\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ - " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ - "#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ - "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ - "\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ - "&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ - "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ - "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ - "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ - "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ - "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ - "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ - "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u00010\u00010\u0001"+ - "0\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ - "2\u00012\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00018\u0001"+ - "8\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001"+ - "C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001E\u0001F\u0001F\u0001G\u0001"+ - "G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001K\u0001"+ - "K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0005M\u0319"+ - "\bM\nM\fM\u031c\tM\u0001M\u0001M\u0003M\u0320\bM\u0001M\u0004M\u0323\b"+ - "M\u000bM\fM\u0324\u0003M\u0327\bM\u0001N\u0001N\u0004N\u032b\bN\u000b"+ - "N\fN\u032c\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001"+ - "S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001"+ - "U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001"+ - "X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0003[\u036c\b[\u0001\\\u0004\\\u036f\b\\\u000b\\\f\\"+ - "\u0370\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_"+ - "\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001"+ - "a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001"+ - "c\u0001d\u0001d\u0001d\u0001d\u0003d\u0394\bd\u0001e\u0001e\u0003e\u0398"+ - "\be\u0001e\u0005e\u039b\be\ne\fe\u039e\te\u0001e\u0001e\u0003e\u03a2\b"+ - "e\u0001e\u0004e\u03a5\be\u000be\fe\u03a6\u0003e\u03a9\be\u0001f\u0001"+ - "f\u0004f\u03ad\bf\u000bf\ff\u03ae\u0001g\u0001g\u0001g\u0001g\u0001h\u0001"+ - 
"h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001"+ - "j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001"+ - "m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ - "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ - "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001"+ - "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03fa\bx\u000b"+ - "x\fx\u03fb\u0001x\u0001x\u0003x\u0400\bx\u0001x\u0004x\u0403\bx\u000b"+ - "x\fx\u0404\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001"+ - "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082"+ - "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083"+ - "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ - "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088"+ - "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089"+ - "\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b"+ - "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c"+ - "\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f"+ - "\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092"+ - "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093"+ - "\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ - "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b"+ - "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049f\b\u009b"+ - "\u000b\u009b\f\u009b\u04a0\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c"+ - "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e"+ - "\u0001\u009e\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002"+ - "\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c"+ - "\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u0014"+ - "4\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000"+ - "H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c"+ - "\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ - "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ - ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8\u0000\u00aaC"+ - "\u00acD\u00aeE\u00b0F\u00b2\u0000\u00b4\u0000\u00b6\u0000\u00b8\u0000"+ - "\u00ba\u0000\u00bc\u0000\u00beG\u00c0H\u00c2\u0000\u00c4I\u00c6\u0000"+ - "\u00c8J\u00caK\u00ccL\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000"+ - 
"\u00d6\u0000\u00d8M\u00daN\u00dcO\u00deP\u00e0\u0000\u00e2\u0000\u00e4"+ - "\u0000\u00e6\u0000\u00e8Q\u00ea\u0000\u00ecR\u00eeS\u00f0T\u00f2\u0000"+ - "\u00f4\u0000\u00f6U\u00f8V\u00fa\u0000\u00fcW\u00fe\u0000\u0100\u0000"+ - "\u0102X\u0104Y\u0106Z\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000"+ - "\u0110\u0000\u0112\u0000\u0114\u0000\u0116[\u0118\\\u011a]\u011c\u0000"+ - "\u011e\u0000\u0120\u0000\u0122\u0000\u0124^\u0126_\u0128`\u012a\u0000"+ - "\u012ca\u012eb\u0130c\u0132d\u0134\u0000\u0136e\u0138f\u013ag\u013ch\u013e"+ - "\u0000\u0140i\u0142j\u0144k\u0146l\u0148m\f\u0000\u0001\u0002\u0003\u0004"+ - "\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000"+ - "\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ - "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ - "++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04c9\u0000\f\u0001\u0000\u0000\u0000"+ - "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000"+ - "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000"+ - "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000"+ - "\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000"+ - "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000"+ - "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001"+ - "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000"+ - "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0000"+ - "0\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001"+ - "\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000"+ - "\u0000\u0001:\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001"+ - ">\u0001\u0000\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001"+ - "\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000"+ - "\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000"+ - "\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d"+ - "\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000"+ - "\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000"+ - "\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r"+ - "\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000"+ - "\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000"+ - "\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080"+ - "\u0001\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084"+ - "\u0001\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088"+ - "\u0001\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c"+ - "\u0001\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090"+ - "\u0001\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094"+ - "\u0001\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098"+ - "\u0001\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c"+ - "\u0001\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0"+ - "\u0001\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4"+ - "\u0001\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00aa"+ - "\u0001\u0000\u0000\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae"+ - 
"\u0001\u0000\u0000\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0003\u00b2"+ - "\u0001\u0000\u0000\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6"+ - "\u0001\u0000\u0000\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba"+ - "\u0001\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be"+ - "\u0001\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c4"+ - "\u0001\u0000\u0000\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc"+ - "\u0001\u0000\u0000\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0"+ - "\u0001\u0000\u0000\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8"+ - "\u0001\u0000\u0000\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc"+ - "\u0001\u0000\u0000\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0"+ - "\u0001\u0000\u0000\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4"+ - "\u0001\u0000\u0000\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8"+ - "\u0001\u0000\u0000\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec"+ - "\u0001\u0000\u0000\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0"+ - "\u0001\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4"+ - "\u0001\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8"+ - "\u0001\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe"+ - "\u0001\u0000\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102"+ - "\u0001\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106"+ - "\u0001\u0000\u0000\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a"+ - "\u0001\u0000\u0000\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e"+ - "\u0001\u0000\u0000\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112"+ - "\u0001\u0000\u0000\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116"+ - "\u0001\u0000\u0000\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a"+ - "\u0001\u0000\u0000\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001"+ - "\u0000\u0000\u0000\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000"+ - "\u0000\u0000\b\u0124\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000"+ - "\u0000\b\u0128\u0001\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000"+ - "\t\u012c\u0001\u0000\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130"+ - "\u0001\u0000\u0000\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001"+ - "\u0000\u0000\u0000\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000"+ - "\u0000\u0000\n\u013a\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000"+ - "\u0000\u000b\u013e\u0001\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000"+ - "\u0000\u000b\u0142\u0001\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000"+ - "\u0000\u000b\u0146\u0001\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000"+ - "\u0000\f\u014a\u0001\u0000\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000"+ - "\u0010\u015b\u0001\u0000\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000"+ - "\u0014\u016b\u0001\u0000\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000"+ - "\u0018\u017c\u0001\u0000\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000"+ - "\u001c\u0191\u0001\u0000\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000"+ - " \u01a0\u0001\u0000\u0000\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3"+ - "\u0001\u0000\u0000\u0000&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000"+ - "\u0000\u0000*\u01c9\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000"+ - ".\u01d8\u0001\u0000\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7"+ - 
"\u0001\u0000\u0000\u00004\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000"+ - "\u0000\u00008\u020e\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000"+ - "<\u0218\u0001\u0000\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220"+ - "\u0001\u0000\u0000\u0000B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000"+ - "\u0000\u0000F\u022a\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000"+ - "J\u022f\u0001\u0000\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a"+ - "\u0001\u0000\u0000\u0000P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000"+ - "\u0000\u0000T\u0243\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000"+ - "X\u0267\u0001\u0000\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298"+ - "\u0001\u0000\u0000\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000"+ - "\u0000\u0000b\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000"+ - "f\u02a7\u0001\u0000\u0000\u0000h\u02a9\u0001\u0000\u0000\u0000j\u02ae"+ - "\u0001\u0000\u0000\u0000l\u02b0\u0001\u0000\u0000\u0000n\u02b6\u0001\u0000"+ - "\u0000\u0000p\u02bc\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c3\u0001\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9"+ - "\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d2\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dd\u0001\u0000\u0000"+ - "\u0000\u0082\u02e0\u0001\u0000\u0000\u0000\u0084\u02e2\u0001\u0000\u0000"+ - "\u0000\u0086\u02e8\u0001\u0000\u0000\u0000\u0088\u02ea\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000"+ - "\u0000\u008e\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000"+ - "\u0000\u0092\u02fa\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000"+ - "\u0000\u0096\u02ff\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000"+ - "\u0000\u009a\u0304\u0001\u0000\u0000\u0000\u009c\u0306\u0001\u0000\u0000"+ - "\u0000\u009e\u0308\u0001\u0000\u0000\u0000\u00a0\u030a\u0001\u0000\u0000"+ - "\u0000\u00a2\u030c\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0326\u0001\u0000\u0000\u0000\u00a8\u0328\u0001\u0000\u0000"+ - "\u0000\u00aa\u0330\u0001\u0000\u0000\u0000\u00ac\u0332\u0001\u0000\u0000"+ - "\u0000\u00ae\u0336\u0001\u0000\u0000\u0000\u00b0\u033a\u0001\u0000\u0000"+ - "\u0000\u00b2\u033e\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0347\u0001\u0000\u0000\u0000\u00b8\u034b\u0001\u0000\u0000"+ - "\u0000\u00ba\u034f\u0001\u0000\u0000\u0000\u00bc\u0353\u0001\u0000\u0000"+ - "\u0000\u00be\u0357\u0001\u0000\u0000\u0000\u00c0\u035f\u0001\u0000\u0000"+ - "\u0000\u00c2\u036b\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000"+ - "\u0000\u00c6\u0372\u0001\u0000\u0000\u0000\u00c8\u0376\u0001\u0000\u0000"+ - "\u0000\u00ca\u037a\u0001\u0000\u0000\u0000\u00cc\u037e\u0001\u0000\u0000"+ - "\u0000\u00ce\u0382\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - "\u0000\u00d2\u038b\u0001\u0000\u0000\u0000\u00d4\u0393\u0001\u0000\u0000"+ - "\u0000\u00d6\u03a8\u0001\u0000\u0000\u0000\u00d8\u03ac\u0001\u0000\u0000"+ - "\u0000\u00da\u03b0\u0001\u0000\u0000\u0000\u00dc\u03b4\u0001\u0000\u0000"+ - "\u0000\u00de\u03b8\u0001\u0000\u0000\u0000\u00e0\u03bc\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c5\u0001\u0000\u0000"+ - "\u0000\u00e6\u03c9\u0001\u0000\u0000\u0000\u00e8\u03cd\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d0\u0001\u0000\u0000\u0000\u00ec\u03d4\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d8\u0001\u0000\u0000\u0000\u00f0\u03dc\u0001\u0000\u0000"+ - 
"\u0000\u00f2\u03e0\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ + "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ + "\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ + "\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ + "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ + " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ + "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ + "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ + "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ + "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ + "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ + "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ + "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ + "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ + "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ + "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ + "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ + "2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ + "4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ + "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ + "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ + "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ + "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ + "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ + "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ + "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ + "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ + "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ + "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ + "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ + "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ + 
"[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ + "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ + "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ + "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ + "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ + "\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ + "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ + "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ + "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ + "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ + "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ + "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ + "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ + "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ + "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ + "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ + "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ + "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ + "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ + "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ + "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ + "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ + "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ + "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ + "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ + "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ + "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ + "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ + "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ + "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ + "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ + "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ + "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ + "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ + "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ + "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ + "\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ + 
".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ + "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ + "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ + "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ + "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ + "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ + "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ + "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ + "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ + "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ + "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ + "\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ + "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ + "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ + "\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ + "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ + "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ + "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ + "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ + "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ + "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ + "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ + "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ + "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ + "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ + "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ + " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ + "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ + "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ + ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ + "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ + "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ + "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ + "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ + "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ + "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ + "\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ + "\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ + "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ + "\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ + "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ + "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ + "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ + "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ + "\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ + "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ + 
"\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ + "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ + "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ + "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ + "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ + "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ + "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ + "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ + "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ + "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ + "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ + "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ + "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ + "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ + "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ + "\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ + "\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ + "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ + "\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ + "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ + "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ + "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ + "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ + "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ + "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ + "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ + "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ + "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ + "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ + "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ + "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ + "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ + "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ + "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ + "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ + "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ + "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ + "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ + "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ + "\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ + "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ + "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ + "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ + "\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ + "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ + 
"\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ + "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ + "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ + "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ + " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ + "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ + "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ + ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ + "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ + "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ + "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ + "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ + "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ + "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ + "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ + "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ + "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ + "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ + "\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ + "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ + "\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ + "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ + "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ + "\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ + "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ + "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ + "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ + "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ + "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ + "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ + "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ + "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ + "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ + "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ + "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ + "\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ + "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ + "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ + "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ + "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ + "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ + "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ + "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ + "\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ + "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ + 
"\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ + "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ + "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ + "\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ + "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ + "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ + "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ + "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ + "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f6\u0001\u0000\u0000\u0000\u00fc\u03ff\u0001\u0000\u0000"+ - "\u0000\u00fe\u0406\u0001\u0000\u0000\u0000\u0100\u040a\u0001\u0000\u0000"+ - "\u0000\u0102\u040e\u0001\u0000\u0000\u0000\u0104\u0412\u0001\u0000\u0000"+ - "\u0000\u0106\u0416\u0001\u0000\u0000\u0000\u0108\u041a\u0001\u0000\u0000"+ - "\u0000\u010a\u0420\u0001\u0000\u0000\u0000\u010c\u0424\u0001\u0000\u0000"+ - "\u0000\u010e\u0428\u0001\u0000\u0000\u0000\u0110\u042c\u0001\u0000\u0000"+ - "\u0000\u0112\u0430\u0001\u0000\u0000\u0000\u0114\u0434\u0001\u0000\u0000"+ - "\u0000\u0116\u0438\u0001\u0000\u0000\u0000\u0118\u043c\u0001\u0000\u0000"+ - "\u0000\u011a\u0440\u0001\u0000\u0000\u0000\u011c\u0444\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044d\u0001\u0000\u0000"+ - "\u0000\u0122\u0451\u0001\u0000\u0000\u0000\u0124\u0455\u0001\u0000\u0000"+ - "\u0000\u0126\u0459\u0001\u0000\u0000\u0000\u0128\u045d\u0001\u0000\u0000"+ - "\u0000\u012a\u0461\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u046f\u0001\u0000\u0000"+ - "\u0000\u0132\u0473\u0001\u0000\u0000\u0000\u0134\u0477\u0001\u0000\u0000"+ - "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0486\u0001\u0000\u0000"+ - "\u0000\u013a\u048a\u0001\u0000\u0000\u0000\u013c\u048e\u0001\u0000\u0000"+ - "\u0000\u013e\u0492\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - "\u0000\u0142\u049e\u0001\u0000\u0000\u0000\u0144\u04a2\u0001\u0000\u0000"+ - "\u0000\u0146\u04a6\u0001\u0000\u0000\u0000\u0148\u04aa\u0001\u0000\u0000"+ - "\u0000\u014a\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c"+ - "\u014d\u0005s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005"+ - "e\u0000\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000"+ - "\u0000\u0153\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000"+ - "\u0155\u0156\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158"+ - "\u0005p\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006"+ - "\u0001\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005"+ - "e\u0000\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000"+ - "\u0000\u015e\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160"+ - "\u0161\u0005h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163"+ - "\u0006\u0002\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165"+ - "\u0005e\u0000\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a"+ - "\u0000\u0000\u0167\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000"+ - "\u0000\u0169\u016a\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000"+ - 
"\u0000\u016b\u016c\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d"+ - "\u016e\u0005p\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005"+ - "a\u0000\u0000\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000"+ - "\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003"+ - "\u0000\u0174\u0015\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000"+ - "\u0176\u0177\u0005r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179"+ - "\u0005m\u0000\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006"+ - "\u0005\u0004\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005"+ - "g\u0000\u0000\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000"+ - "\u0000\u017f\u0180\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000"+ - "\u0181\u0182\u0006\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000"+ - "\u0183\u0184\u0005i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186"+ - "\u0005l\u0000\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n"+ - "\u0000\u0000\u0188\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000"+ - "\u018a\u018b\u0005t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d"+ - "\u0005t\u0000\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000"+ - "\u0000\u0000\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000"+ - "\u0193\u0194\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196"+ - "\u0001\u0000\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001"+ - "\u0000\u0000\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000"+ - "\u0000\u019a\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c"+ - "\u019d\u0005t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f"+ - "\u0006\t\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005"+ - "m\u0000\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000"+ - "\u0000\u01a3\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a6\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7"+ - "\u01a8\u0005m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005"+ - "_\u0000\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000"+ - "\u0000\u01ac\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae"+ - "\u01af\u0005n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001"+ - "\u0000\u0000\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000"+ - "\u0000\u0000\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000"+ - "\u01b5\u01b6\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8"+ - "\u0005m\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000"+ - "\u0000\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000"+ - "\u0000\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be"+ - "\u01bf\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1"+ - "\u0006\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005"+ - "s\u0000\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000"+ - "\u0000\u01c5\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000"+ - "\u01c7\u01c8\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9"+ - "\u01ca\u0005s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005"+ - "r\u0000\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ - "\u0000\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000"+ - 
"\u01d0\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3"+ - "\u0005a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s"+ - "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010"+ - "\u0000\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000"+ - "\u01d9\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc"+ - "\u0005r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000"+ - "\u0000\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000"+ - "\u0000\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000"+ - "\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000"+ - "\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000"+ - "\u01e5\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7"+ - "\u01e8\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000"+ - "\u0000\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000"+ - "\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000"+ - "\u0000\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000"+ - "\u0000\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000"+ - "\u0000\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000"+ - "\u01f4\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ - "\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f7"+ - "3\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ - "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ - "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ - "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ - "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ - "\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ - "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ - "\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ - "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ - "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0003\u00a2K\u0000\u020f\u0210\u0001\u0000\u0000"+ - "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ - "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ - "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ - "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ - "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ - "\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ - "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ - "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ - "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ - "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ - "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ - 
"\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ - "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ - "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ - "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ - "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ - "\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ - "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ - "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ - "M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ - "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ - "\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ - "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ - "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ - "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ - "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ - "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ - "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ - "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ - "J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ - "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ - "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ - "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ - "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ - "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ - "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ - "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ - "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ - "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ - "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ - "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ - "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ - "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ - "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ - "\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ - "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ - "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ - "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ - "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ - "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ - "\u0000\u0000\u0273\u0277\u0003j/\u0000\u0274\u0276\u0003D\u001c\u0000"+ - "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ - "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ - "\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ - "\u027a\u027c\u0003j/\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ - 
"\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ - "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ - "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ - "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ - "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ - "\u0000\u0000\u0000\u0285\u0289\u0003j/\u0000\u0286\u0288\u0003D\u001c"+ - "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ - "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ - "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ - "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ - "\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ - "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003j/\u0000\u0291\u0293\u0003"+ - "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ - "\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ - "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ - "\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ - "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ - "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ - "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ - "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ - "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ - "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ - "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ - "\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005,\u0000\u0000\u02a8"+ - "g\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005d\u0000\u0000\u02aa\u02ab\u0005"+ - "e\u0000\u0000\u02ab\u02ac\u0005s\u0000\u0000\u02ac\u02ad\u0005c\u0000"+ - "\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005.\u0000\u0000\u02af"+ - "k\u0001\u0000\u0000\u0000\u02b0\u02b1\u0005f\u0000\u0000\u02b1\u02b2\u0005"+ - "a\u0000\u0000\u02b2\u02b3\u0005l\u0000\u0000\u02b3\u02b4\u0005s\u0000"+ - "\u0000\u02b4\u02b5\u0005e\u0000\u0000\u02b5m\u0001\u0000\u0000\u0000\u02b6"+ - "\u02b7\u0005f\u0000\u0000\u02b7\u02b8\u0005i\u0000\u0000\u02b8\u02b9\u0005"+ - "r\u0000\u0000\u02b9\u02ba\u0005s\u0000\u0000\u02ba\u02bb\u0005t\u0000"+ - "\u0000\u02bbo\u0001\u0000\u0000\u0000\u02bc\u02bd\u0005l\u0000\u0000\u02bd"+ - "\u02be\u0005a\u0000\u0000\u02be\u02bf\u0005s\u0000\u0000\u02bf\u02c0\u0005"+ - "t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005(\u0000"+ - "\u0000\u02c2s\u0001\u0000\u0000\u0000\u02c3\u02c4\u0005i\u0000\u0000\u02c4"+ - "\u02c5\u0005n\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ - "i\u0000\u0000\u02c7\u02c8\u0005s\u0000\u0000\u02c8w\u0001\u0000\u0000"+ - "\u0000\u02c9\u02ca\u0005l\u0000\u0000\u02ca\u02cb\u0005i\u0000\u0000\u02cb"+ - "\u02cc\u0005k\u0000\u0000\u02cc\u02cd\u0005e\u0000\u0000\u02cdy\u0001"+ - "\u0000\u0000\u0000\u02ce\u02cf\u0005n\u0000\u0000\u02cf\u02d0\u0005o\u0000"+ - "\u0000\u02d0\u02d1\u0005t\u0000\u0000\u02d1{\u0001\u0000\u0000\u0000\u02d2"+ - "\u02d3\u0005n\u0000\u0000\u02d3\u02d4\u0005u\u0000\u0000\u02d4\u02d5\u0005"+ - "l\u0000\u0000\u02d5\u02d6\u0005l\u0000\u0000\u02d6}\u0001\u0000\u0000"+ - "\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005u\u0000\u0000\u02d9"+ - 
"\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000\u0000\u02db\u02dc\u0005"+ - "s\u0000\u0000\u02dc\u007f\u0001\u0000\u0000\u0000\u02dd\u02de\u0005o\u0000"+ - "\u0000\u02de\u02df\u0005r\u0000\u0000\u02df\u0081\u0001\u0000\u0000\u0000"+ - "\u02e0\u02e1\u0005?\u0000\u0000\u02e1\u0083\u0001\u0000\u0000\u0000\u02e2"+ - "\u02e3\u0005r\u0000\u0000\u02e3\u02e4\u0005l\u0000\u0000\u02e4\u02e5\u0005"+ - "i\u0000\u0000\u02e5\u02e6\u0005k\u0000\u0000\u02e6\u02e7\u0005e\u0000"+ - "\u0000\u02e7\u0085\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005)\u0000\u0000"+ - "\u02e9\u0087\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005t\u0000\u0000\u02eb"+ - "\u02ec\u0005r\u0000\u0000\u02ec\u02ed\u0005u\u0000\u0000\u02ed\u02ee\u0005"+ - "e\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005=\u0000"+ - "\u0000\u02f0\u02f1\u0005=\u0000\u0000\u02f1\u008b\u0001\u0000\u0000\u0000"+ - "\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005~\u0000\u0000\u02f4\u008d"+ - "\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005!\u0000\u0000\u02f6\u02f7\u0005"+ - "=\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005<\u0000"+ - "\u0000\u02f9\u0091\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005<\u0000\u0000"+ - "\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000\u02fd"+ - "\u02fe\u0005>\u0000\u0000\u02fe\u0095\u0001\u0000\u0000\u0000\u02ff\u0300"+ - "\u0005>\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301\u0097\u0001\u0000"+ - "\u0000\u0000\u0302\u0303\u0005+\u0000\u0000\u0303\u0099\u0001\u0000\u0000"+ - "\u0000\u0304\u0305\u0005-\u0000\u0000\u0305\u009b\u0001\u0000\u0000\u0000"+ - "\u0306\u0307\u0005*\u0000\u0000\u0307\u009d\u0001\u0000\u0000\u0000\u0308"+ - "\u0309\u0005/\u0000\u0000\u0309\u009f\u0001\u0000\u0000\u0000\u030a\u030b"+ - "\u0005%\u0000\u0000\u030b\u00a1\u0001\u0000\u0000\u0000\u030c\u030d\u0005"+ - "[\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u030f\u0006K\u0000"+ - "\u0000\u030f\u0310\u0006K\u0000\u0000\u0310\u00a3\u0001\u0000\u0000\u0000"+ - "\u0311\u0312\u0005]\u0000\u0000\u0312\u0313\u0001\u0000\u0000\u0000\u0313"+ - "\u0314\u0006L\r\u0000\u0314\u0315\u0006L\r\u0000\u0315\u00a5\u0001\u0000"+ - "\u0000\u0000\u0316\u031a\u0003F\u001d\u0000\u0317\u0319\u0003V%\u0000"+ - "\u0318\u0317\u0001\u0000\u0000\u0000\u0319\u031c\u0001\u0000\u0000\u0000"+ - "\u031a\u0318\u0001\u0000\u0000\u0000\u031a\u031b\u0001\u0000\u0000\u0000"+ - "\u031b\u0327\u0001\u0000\u0000\u0000\u031c\u031a\u0001\u0000\u0000\u0000"+ - "\u031d\u0320\u0003T$\u0000\u031e\u0320\u0003N!\u0000\u031f\u031d\u0001"+ - "\u0000\u0000\u0000\u031f\u031e\u0001\u0000\u0000\u0000\u0320\u0322\u0001"+ - "\u0000\u0000\u0000\u0321\u0323\u0003V%\u0000\u0322\u0321\u0001\u0000\u0000"+ - "\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0322\u0001\u0000\u0000"+ - "\u0000\u0324\u0325\u0001\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000"+ - "\u0000\u0326\u0316\u0001\u0000\u0000\u0000\u0326\u031f\u0001\u0000\u0000"+ - "\u0000\u0327\u00a7\u0001\u0000\u0000\u0000\u0328\u032a\u0003P\"\u0000"+ - "\u0329\u032b\u0003R#\u0000\u032a\u0329\u0001\u0000\u0000\u0000\u032b\u032c"+ - "\u0001\u0000\u0000\u0000\u032c\u032a\u0001\u0000\u0000\u0000\u032c\u032d"+ - "\u0001\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f"+ - "\u0003P\"\u0000\u032f\u00a9\u0001\u0000\u0000\u0000\u0330\u0331\u0003"+ - "\u00a8N\u0000\u0331\u00ab\u0001\u0000\u0000\u0000\u0332\u0333\u00032\u0013"+ - "\u0000\u0333\u0334\u0001\u0000\u0000\u0000\u0334\u0335\u0006P\t\u0000"+ - "\u0335\u00ad\u0001\u0000\u0000\u0000\u0336\u0337\u00034\u0014\u0000\u0337"+ - 
"\u0338\u0001\u0000\u0000\u0000\u0338\u0339\u0006Q\t\u0000\u0339\u00af"+ - "\u0001\u0000\u0000\u0000\u033a\u033b\u00036\u0015\u0000\u033b\u033c\u0001"+ - "\u0000\u0000\u0000\u033c\u033d\u0006R\t\u0000\u033d\u00b1\u0001\u0000"+ - "\u0000\u0000\u033e\u033f\u0003B\u001b\u0000\u033f\u0340\u0001\u0000\u0000"+ - "\u0000\u0340\u0341\u0006S\f\u0000\u0341\u0342\u0006S\r\u0000\u0342\u00b3"+ - "\u0001\u0000\u0000\u0000\u0343\u0344\u0003\u00a2K\u0000\u0344\u0345\u0001"+ - "\u0000\u0000\u0000\u0345\u0346\u0006T\n\u0000\u0346\u00b5\u0001\u0000"+ - "\u0000\u0000\u0347\u0348\u0003\u00a4L\u0000\u0348\u0349\u0001\u0000\u0000"+ - "\u0000\u0349\u034a\u0006U\u000e\u0000\u034a\u00b7\u0001\u0000\u0000\u0000"+ - "\u034b\u034c\u0003f-\u0000\u034c\u034d\u0001\u0000\u0000\u0000\u034d\u034e"+ - "\u0006V\u000f\u0000\u034e\u00b9\u0001\u0000\u0000\u0000\u034f\u0350\u0003"+ - "d,\u0000\u0350\u0351\u0001\u0000\u0000\u0000\u0351\u0352\u0006W\u0010"+ - "\u0000\u0352\u00bb\u0001\u0000\u0000\u0000\u0353\u0354\u0003X&\u0000\u0354"+ - "\u0355\u0001\u0000\u0000\u0000\u0355\u0356\u0006X\u0011\u0000\u0356\u00bd"+ - "\u0001\u0000\u0000\u0000\u0357\u0358\u0005o\u0000\u0000\u0358\u0359\u0005"+ - "p\u0000\u0000\u0359\u035a\u0005t\u0000\u0000\u035a\u035b\u0005i\u0000"+ - "\u0000\u035b\u035c\u0005o\u0000\u0000\u035c\u035d\u0005n\u0000\u0000\u035d"+ - "\u035e\u0005s\u0000\u0000\u035e\u00bf\u0001\u0000\u0000\u0000\u035f\u0360"+ - "\u0005m\u0000\u0000\u0360\u0361\u0005e\u0000\u0000\u0361\u0362\u0005t"+ - "\u0000\u0000\u0362\u0363\u0005a\u0000\u0000\u0363\u0364\u0005d\u0000\u0000"+ - "\u0364\u0365\u0005a\u0000\u0000\u0365\u0366\u0005t\u0000\u0000\u0366\u0367"+ - "\u0005a\u0000\u0000\u0367\u00c1\u0001\u0000\u0000\u0000\u0368\u036c\b"+ - "\n\u0000\u0000\u0369\u036a\u0005/\u0000\u0000\u036a\u036c\b\u000b\u0000"+ - "\u0000\u036b\u0368\u0001\u0000\u0000\u0000\u036b\u0369\u0001\u0000\u0000"+ - "\u0000\u036c\u00c3\u0001\u0000\u0000\u0000\u036d\u036f\u0003\u00c2[\u0000"+ - "\u036e\u036d\u0001\u0000\u0000\u0000\u036f\u0370\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000"+ - "\u0371\u00c5\u0001\u0000\u0000\u0000\u0372\u0373\u0003\u00aaO\u0000\u0373"+ - "\u0374\u0001\u0000\u0000\u0000\u0374\u0375\u0006]\u0012\u0000\u0375\u00c7"+ - "\u0001\u0000\u0000\u0000\u0376\u0377\u00032\u0013\u0000\u0377\u0378\u0001"+ - "\u0000\u0000\u0000\u0378\u0379\u0006^\t\u0000\u0379\u00c9\u0001\u0000"+ - "\u0000\u0000\u037a\u037b\u00034\u0014\u0000\u037b\u037c\u0001\u0000\u0000"+ - "\u0000\u037c\u037d\u0006_\t\u0000\u037d\u00cb\u0001\u0000\u0000\u0000"+ - "\u037e\u037f\u00036\u0015\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380"+ - "\u0381\u0006`\t\u0000\u0381\u00cd\u0001\u0000\u0000\u0000\u0382\u0383"+ - "\u0003B\u001b\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0385\u0006"+ - "a\f\u0000\u0385\u0386\u0006a\r\u0000\u0386\u00cf\u0001\u0000\u0000\u0000"+ - "\u0387\u0388\u0003j/\u0000\u0388\u0389\u0001\u0000\u0000\u0000\u0389\u038a"+ - "\u0006b\u0013\u0000\u038a\u00d1\u0001\u0000\u0000\u0000\u038b\u038c\u0003"+ - "f-\u0000\u038c\u038d\u0001\u0000\u0000\u0000\u038d\u038e\u0006c\u000f"+ - "\u0000\u038e\u00d3\u0001\u0000\u0000\u0000\u038f\u0394\u0003F\u001d\u0000"+ - "\u0390\u0394\u0003D\u001c\u0000\u0391\u0394\u0003T$\u0000\u0392\u0394"+ - "\u0003\u009cH\u0000\u0393\u038f\u0001\u0000\u0000\u0000\u0393\u0390\u0001"+ - "\u0000\u0000\u0000\u0393\u0391\u0001\u0000\u0000\u0000\u0393\u0392\u0001"+ - "\u0000\u0000\u0000\u0394\u00d5\u0001\u0000\u0000\u0000\u0395\u0398\u0003"+ - 
"F\u001d\u0000\u0396\u0398\u0003\u009cH\u0000\u0397\u0395\u0001\u0000\u0000"+ - "\u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398\u039c\u0001\u0000\u0000"+ - "\u0000\u0399\u039b\u0003\u00d4d\u0000\u039a\u0399\u0001\u0000\u0000\u0000"+ - "\u039b\u039e\u0001\u0000\u0000\u0000\u039c\u039a\u0001\u0000\u0000\u0000"+ - "\u039c\u039d\u0001\u0000\u0000\u0000\u039d\u03a9\u0001\u0000\u0000\u0000"+ - "\u039e\u039c\u0001\u0000\u0000\u0000\u039f\u03a2\u0003T$\u0000\u03a0\u03a2"+ - "\u0003N!\u0000\u03a1\u039f\u0001\u0000\u0000\u0000\u03a1\u03a0\u0001\u0000"+ - "\u0000\u0000\u03a2\u03a4\u0001\u0000\u0000\u0000\u03a3\u03a5\u0003\u00d4"+ - "d\u0000\u03a4\u03a3\u0001\u0000\u0000\u0000\u03a5\u03a6\u0001\u0000\u0000"+ - "\u0000\u03a6\u03a4\u0001\u0000\u0000\u0000\u03a6\u03a7\u0001\u0000\u0000"+ - "\u0000\u03a7\u03a9\u0001\u0000\u0000\u0000\u03a8\u0397\u0001\u0000\u0000"+ - "\u0000\u03a8\u03a1\u0001\u0000\u0000\u0000\u03a9\u00d7\u0001\u0000\u0000"+ - "\u0000\u03aa\u03ad\u0003\u00d6e\u0000\u03ab\u03ad\u0003\u00a8N\u0000\u03ac"+ - "\u03aa\u0001\u0000\u0000\u0000\u03ac\u03ab\u0001\u0000\u0000\u0000\u03ad"+ - "\u03ae\u0001\u0000\u0000\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae"+ - "\u03af\u0001\u0000\u0000\u0000\u03af\u00d9\u0001\u0000\u0000\u0000\u03b0"+ - "\u03b1\u00032\u0013\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3"+ - "\u0006g\t\u0000\u03b3\u00db\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003"+ - "4\u0014\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006h\t"+ - "\u0000\u03b7\u00dd\u0001\u0000\u0000\u0000\u03b8\u03b9\u00036\u0015\u0000"+ - "\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006i\t\u0000\u03bb"+ - "\u00df\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003B\u001b\u0000\u03bd\u03be"+ - "\u0001\u0000\u0000\u0000\u03be\u03bf\u0006j\f\u0000\u03bf\u03c0\u0006"+ - "j\r\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003d,\u0000"+ - "\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006k\u0010\u0000\u03c4"+ - "\u00e3\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003f-\u0000\u03c6\u03c7\u0001"+ - "\u0000\u0000\u0000\u03c7\u03c8\u0006l\u000f\u0000\u03c8\u00e5\u0001\u0000"+ - "\u0000\u0000\u03c9\u03ca\u0003j/\u0000\u03ca\u03cb\u0001\u0000\u0000\u0000"+ - "\u03cb\u03cc\u0006m\u0013\u0000\u03cc\u00e7\u0001\u0000\u0000\u0000\u03cd"+ - "\u03ce\u0005a\u0000\u0000\u03ce\u03cf\u0005s\u0000\u0000\u03cf\u00e9\u0001"+ - "\u0000\u0000\u0000\u03d0\u03d1\u0003\u00d8f\u0000\u03d1\u03d2\u0001\u0000"+ - "\u0000\u0000\u03d2\u03d3\u0006o\u0014\u0000\u03d3\u00eb\u0001\u0000\u0000"+ - "\u0000\u03d4\u03d5\u00032\u0013\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ - "\u03d6\u03d7\u0006p\t\u0000\u03d7\u00ed\u0001\u0000\u0000\u0000\u03d8"+ - "\u03d9\u00034\u0014\u0000\u03d9\u03da\u0001\u0000\u0000\u0000\u03da\u03db"+ - "\u0006q\t\u0000\u03db\u00ef\u0001\u0000\u0000\u0000\u03dc\u03dd\u0003"+ - "6\u0015\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03df\u0006r\t"+ - "\u0000\u03df\u00f1\u0001\u0000\u0000\u0000\u03e0\u03e1\u0003B\u001b\u0000"+ - "\u03e1\u03e2\u0001\u0000\u0000\u0000\u03e2\u03e3\u0006s\f\u0000\u03e3"+ - "\u03e4\u0006s\r\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000\u03e5\u03e6"+ - "\u0003\u00a2K\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006"+ - "t\n\u0000\u03e8\u03e9\u0006t\u0015\u0000\u03e9\u00f5\u0001\u0000\u0000"+ - "\u0000\u03ea\u03eb\u0005o\u0000\u0000\u03eb\u03ec\u0005n\u0000\u0000\u03ec"+ - "\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0006u\u0016\u0000\u03ee\u00f7"+ - "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005w\u0000\u0000\u03f0\u03f1\u0005"+ - 
"i\u0000\u0000\u03f1\u03f2\u0005t\u0000\u0000\u03f2\u03f3\u0005h\u0000"+ - "\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006v\u0016\u0000"+ - "\u03f5\u00f9\u0001\u0000\u0000\u0000\u03f6\u03f7\b\f\u0000\u0000\u03f7"+ - "\u00fb\u0001\u0000\u0000\u0000\u03f8\u03fa\u0003\u00faw\u0000\u03f9\u03f8"+ - "\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03f9"+ - "\u0001\u0000\u0000\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd"+ - "\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003\u0140\u009a\u0000\u03fe\u0400"+ - "\u0001\u0000\u0000\u0000\u03ff\u03f9\u0001\u0000\u0000\u0000\u03ff\u0400"+ - "\u0001\u0000\u0000\u0000\u0400\u0402\u0001\u0000\u0000\u0000\u0401\u0403"+ - "\u0003\u00faw\u0000\u0402\u0401\u0001\u0000\u0000\u0000\u0403\u0404\u0001"+ - "\u0000\u0000\u0000\u0404\u0402\u0001\u0000\u0000\u0000\u0404\u0405\u0001"+ - "\u0000\u0000\u0000\u0405\u00fd\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ - "\u00aaO\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006y\u0012"+ - "\u0000\u0409\u00ff\u0001\u0000\u0000\u0000\u040a\u040b\u0003\u00fcx\u0000"+ - "\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006z\u0017\u0000\u040d"+ - "\u0101\u0001\u0000\u0000\u0000\u040e\u040f\u00032\u0013\u0000\u040f\u0410"+ - "\u0001\u0000\u0000\u0000\u0410\u0411\u0006{\t\u0000\u0411\u0103\u0001"+ - "\u0000\u0000\u0000\u0412\u0413\u00034\u0014\u0000\u0413\u0414\u0001\u0000"+ - "\u0000\u0000\u0414\u0415\u0006|\t\u0000\u0415\u0105\u0001\u0000\u0000"+ - "\u0000\u0416\u0417\u00036\u0015\u0000\u0417\u0418\u0001\u0000\u0000\u0000"+ - "\u0418\u0419\u0006}\t\u0000\u0419\u0107\u0001\u0000\u0000\u0000\u041a"+ - "\u041b\u0003B\u001b\u0000\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d"+ - "\u0006~\f\u0000\u041d\u041e\u0006~\r\u0000\u041e\u041f\u0006~\r\u0000"+ - "\u041f\u0109\u0001\u0000\u0000\u0000\u0420\u0421\u0003d,\u0000\u0421\u0422"+ - "\u0001\u0000\u0000\u0000\u0422\u0423\u0006\u007f\u0010\u0000\u0423\u010b"+ - "\u0001\u0000\u0000\u0000\u0424\u0425\u0003f-\u0000\u0425\u0426\u0001\u0000"+ - "\u0000\u0000\u0426\u0427\u0006\u0080\u000f\u0000\u0427\u010d\u0001\u0000"+ - "\u0000\u0000\u0428\u0429\u0003j/\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ - "\u042a\u042b\u0006\u0081\u0013\u0000\u042b\u010f\u0001\u0000\u0000\u0000"+ - "\u042c\u042d\u0003\u00f8v\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e"+ - "\u042f\u0006\u0082\u0018\u0000\u042f\u0111\u0001\u0000\u0000\u0000\u0430"+ - "\u0431\u0003\u00d8f\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0433"+ - "\u0006\u0083\u0014\u0000\u0433\u0113\u0001\u0000\u0000\u0000\u0434\u0435"+ - "\u0003\u00aaO\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ - "\u0084\u0012\u0000\u0437\u0115\u0001\u0000\u0000\u0000\u0438\u0439\u0003"+ - "2\u0013\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006\u0085"+ - "\t\u0000\u043b\u0117\u0001\u0000\u0000\u0000\u043c\u043d\u00034\u0014"+ - "\u0000\u043d\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006\u0086\t\u0000"+ - "\u043f\u0119\u0001\u0000\u0000\u0000\u0440\u0441\u00036\u0015\u0000\u0441"+ - "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006\u0087\t\u0000\u0443\u011b"+ - "\u0001\u0000\u0000\u0000\u0444\u0445\u0003B\u001b\u0000\u0445\u0446\u0001"+ - "\u0000\u0000\u0000\u0446\u0447\u0006\u0088\f\u0000\u0447\u0448\u0006\u0088"+ - "\r\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003j/\u0000"+ - "\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089\u0013\u0000"+ - "\u044c\u011f\u0001\u0000\u0000\u0000\u044d\u044e\u0003\u00aaO\u0000\u044e"+ - 
"\u044f\u0001\u0000\u0000\u0000\u044f\u0450\u0006\u008a\u0012\u0000\u0450"+ - "\u0121\u0001\u0000\u0000\u0000\u0451\u0452\u0003\u00a6M\u0000\u0452\u0453"+ - "\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u008b\u0019\u0000\u0454\u0123"+ - "\u0001\u0000\u0000\u0000\u0455\u0456\u00032\u0013\u0000\u0456\u0457\u0001"+ - "\u0000\u0000\u0000\u0457\u0458\u0006\u008c\t\u0000\u0458\u0125\u0001\u0000"+ - "\u0000\u0000\u0459\u045a\u00034\u0014\u0000\u045a\u045b\u0001\u0000\u0000"+ - "\u0000\u045b\u045c\u0006\u008d\t\u0000\u045c\u0127\u0001\u0000\u0000\u0000"+ - "\u045d\u045e\u00036\u0015\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f"+ - "\u0460\u0006\u008e\t\u0000\u0460\u0129\u0001\u0000\u0000\u0000\u0461\u0462"+ - "\u0003B\u001b\u0000\u0462\u0463\u0001\u0000\u0000\u0000\u0463\u0464\u0006"+ - "\u008f\f\u0000\u0464\u0465\u0006\u008f\r\u0000\u0465\u012b\u0001\u0000"+ - "\u0000\u0000\u0466\u0467\u0005i\u0000\u0000\u0467\u0468\u0005n\u0000\u0000"+ - "\u0468\u0469\u0005f\u0000\u0000\u0469\u046a\u0005o\u0000\u0000\u046a\u012d"+ - "\u0001\u0000\u0000\u0000\u046b\u046c\u00032\u0013\u0000\u046c\u046d\u0001"+ - "\u0000\u0000\u0000\u046d\u046e\u0006\u0091\t\u0000\u046e\u012f\u0001\u0000"+ - "\u0000\u0000\u046f\u0470\u00034\u0014\u0000\u0470\u0471\u0001\u0000\u0000"+ - "\u0000\u0471\u0472\u0006\u0092\t\u0000\u0472\u0131\u0001\u0000\u0000\u0000"+ - "\u0473\u0474\u00036\u0015\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475"+ - "\u0476\u0006\u0093\t\u0000\u0476\u0133\u0001\u0000\u0000\u0000\u0477\u0478"+ - "\u0003B\u001b\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479\u047a\u0006"+ - "\u0094\f\u0000\u047a\u047b\u0006\u0094\r\u0000\u047b\u0135\u0001\u0000"+ - "\u0000\u0000\u047c\u047d\u0005f\u0000\u0000\u047d\u047e\u0005u\u0000\u0000"+ - "\u047e\u047f\u0005n\u0000\u0000\u047f\u0480\u0005c\u0000\u0000\u0480\u0481"+ - "\u0005t\u0000\u0000\u0481\u0482\u0005i\u0000\u0000\u0482\u0483\u0005o"+ - "\u0000\u0000\u0483\u0484\u0005n\u0000\u0000\u0484\u0485\u0005s\u0000\u0000"+ - "\u0485\u0137\u0001\u0000\u0000\u0000\u0486\u0487\u00032\u0013\u0000\u0487"+ - "\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u0096\t\u0000\u0489\u0139"+ - "\u0001\u0000\u0000\u0000\u048a\u048b\u00034\u0014\u0000\u048b\u048c\u0001"+ - "\u0000\u0000\u0000\u048c\u048d\u0006\u0097\t\u0000\u048d\u013b\u0001\u0000"+ - "\u0000\u0000\u048e\u048f\u00036\u0015\u0000\u048f\u0490\u0001\u0000\u0000"+ - "\u0000\u0490\u0491\u0006\u0098\t\u0000\u0491\u013d\u0001\u0000\u0000\u0000"+ - "\u0492\u0493\u0003\u00a4L\u0000\u0493\u0494\u0001\u0000\u0000\u0000\u0494"+ - "\u0495\u0006\u0099\u000e\u0000\u0495\u0496\u0006\u0099\r\u0000\u0496\u013f"+ - "\u0001\u0000\u0000\u0000\u0497\u0498\u0005:\u0000\u0000\u0498\u0141\u0001"+ - "\u0000\u0000\u0000\u0499\u049f\u0003N!\u0000\u049a\u049f\u0003D\u001c"+ - "\u0000\u049b\u049f\u0003j/\u0000\u049c\u049f\u0003F\u001d\u0000\u049d"+ - "\u049f\u0003T$\u0000\u049e\u0499\u0001\u0000\u0000\u0000\u049e\u049a\u0001"+ - "\u0000\u0000\u0000\u049e\u049b\u0001\u0000\u0000\u0000\u049e\u049c\u0001"+ - "\u0000\u0000\u0000\u049e\u049d\u0001\u0000\u0000\u0000\u049f\u04a0\u0001"+ - "\u0000\u0000\u0000\u04a0\u049e\u0001\u0000\u0000\u0000\u04a0\u04a1\u0001"+ - "\u0000\u0000\u0000\u04a1\u0143\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003"+ - "2\u0013\u0000\u04a3\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u009c"+ - "\t\u0000\u04a5\u0145\u0001\u0000\u0000\u0000\u04a6\u04a7\u00034\u0014"+ - "\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006\u009d\t\u0000"+ - "\u04a9\u0147\u0001\u0000\u0000\u0000\u04aa\u04ab\u00036\u0015\u0000\u04ab"+ - 
"\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad\u0006\u009e\t\u0000\u04ad\u0149"+ - "\u0001\u0000\u0000\u0000:\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ - "\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4\u01fd\u01ff\u020a\u0233\u0238\u0241"+ - "\u0248\u024d\u024f\u025a\u0262\u0265\u0267\u026c\u0271\u0277\u027e\u0283"+ - "\u0289\u028c\u0294\u0298\u031a\u031f\u0324\u0326\u032c\u036b\u0370\u0393"+ - "\u0397\u039c\u03a1\u03a6\u03a8\u03ac\u03ae\u03fb\u03ff\u0404\u049e\u04a0"+ - "\u001a\u0005\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001"+ - "\u0000\u0005\u0003\u0000\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000"+ - "\u0005\t\u0000\u0000\u0001\u0000\u0007@\u0000\u0005\u0000\u0000\u0007"+ - "\u001a\u0000\u0004\u0000\u0000\u0007A\u0000\u0007\"\u0000\u0007!\u0000"+ - "\u0007\u001b\u0000\u0007C\u0000\u0007$\u0000\u0007M\u0000\u0005\u000b"+ - "\u0000\u0005\u0007\u0000\u0007W\u0000\u0007V\u0000\u0007B\u0000"; + "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ + "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ + "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ + "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ + "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ + "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ + "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ + "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ + "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ + "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ + "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ + "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ + "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ + "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ + "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ + "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ + "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ + "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ + "\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ + "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ + "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ + "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ + "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ + "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ + "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ + "\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ + "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ + "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ + "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ + "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ + "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ + "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ + "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ + 
"\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ + "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ + "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ + "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ + "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ + "\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ + "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ + "\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ + "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ + "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ + "\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ + "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ + "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ + "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ + "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ + "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ + "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ + "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ + "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ + "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ + "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ + "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ + "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ + "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ + "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ + "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ + "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ + "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ + "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ + "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ + "\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ + "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ + "\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ + "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ + "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ + "\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ + "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ + "\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ + "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ + "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ + "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ + "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ + "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ + "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ + "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ + 
"\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ + "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ + "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ + "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ + "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ + "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ + "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ + "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ + "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ + "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ + "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ + "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ + "\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ + "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ + "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ + "\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ + "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ + "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ + "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ + "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ + "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ + "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ + "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ + "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ + "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ + "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ + "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ + "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ + "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ + "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ + "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ + "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ + "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ + "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ + "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ + "\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ + "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ + "\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ + "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ + "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ + "\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ + "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ + "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ + "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ + "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ + 
"\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ + "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ + "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ + "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ + "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ + "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ + "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ + "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ + "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ + "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ + "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ + "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ + "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ + "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ + "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ + "\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ + "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ + "\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ + "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ + "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ + "\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ + "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ + "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ + "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ + "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ + "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ + "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ + "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ + "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ + "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ + "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ + "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ + "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ + "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ + "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ + "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ + "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ + "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ + "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ + "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ + "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ + "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ + "\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ + "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ + "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ + 
"\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ + "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ + "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ + "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ + "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ + "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ + "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ + "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ + "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ + "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ + "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ + "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ + "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ + "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ + "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ + "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ + "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ + "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ + "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ + "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ + "\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ + "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ + "n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ + "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ + "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ + "=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ + "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ + "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ + "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ + "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ + "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ + "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ + "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ + "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ + "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ + "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ + "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ + "s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ + "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ + "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ + "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ + "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ + "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ + "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ + "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ + 
"}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ + "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ + "\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ + "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ + "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ + "\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ + "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ + "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ + "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ + "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ + "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ + "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ + "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ + "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ + "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ + "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ + "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ + "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ + "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ + "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ + "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ + "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ + "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ + "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ + "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ + "\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ + "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ + "\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ + "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ + "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ + "\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ + "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ + "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ + "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ + "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ + "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ + "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ + "V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ + "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ + "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ + "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ + "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ + "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ + "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ + "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ + 
"\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ + "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ + "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ + "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ + "\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ + "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ + "\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ + "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ + "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ + "\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ + "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ + "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ + "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ + "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ + "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ + "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ + "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ + "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ + "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ + "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ + "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ + "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ + "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ + "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ + "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ + "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ + "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ + "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ + "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ + "\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ + "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ + "\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ + "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ + "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ + "\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ + "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ + "\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ + "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ + "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ + "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ + "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ + "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ + "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ + "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ + 
"T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ + "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ + "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ + "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ + "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ + "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ + "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ + "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ + "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ + "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ + "\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ + "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ + "\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ + "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ + "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ + "\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ + "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ + "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ + "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ + "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ + "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ + "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ + "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ + "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ + "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ + "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ + "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ + "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ + "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ + "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ + "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ + "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ + "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ + "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ + "\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ + "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ + "\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ + "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ + "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ + "\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ + "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ + "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ + "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ + "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ + 
"\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ + "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ + "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ + "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ + "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ + "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ + "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ + "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ + "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ + "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ + "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ + "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ + "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ + "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ + "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ + "\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ + "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ + "\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ + "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ + "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ + "\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ + "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ + "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ + "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ + "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ + "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ + "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ + "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ + "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ + "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ + "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ + "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ + "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ + "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ + "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ + "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ + "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ + "\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ + "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ + "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ + "\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ + "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ + "\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ + "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ + "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ + 
"\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ + "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ + "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ + "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ + "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ + "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ + "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ + "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ + "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ + "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ + "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ + "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ + "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ + "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ + "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ + "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ + "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ + "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ + "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ + "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ + "\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ + "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ + "\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ + "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ + "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ + "f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ + "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ + "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ + "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ + "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ + "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ + "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ + "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ + "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ + "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ + "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ + "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ + "\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ + "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ + "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ + "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ + "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ + "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ + "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ + "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ + 
"\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ + "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ + "\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ + "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ + "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ + "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ + "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ + "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ + "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ + "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ + "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ + "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ + "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ + "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ + "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ + "\u0000\u0007W\u0000\u0007C\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 0e3df1df3978e..b4a8e60dd69aa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -33,6 +33,7 @@ null 'and' 'asc' '=' +'::' ',' 'desc' '.' 
@@ -145,6 +146,7 @@ BY AND ASC ASSIGN +CAST_OP COMMA DESC DOT @@ -234,6 +236,7 @@ valueExpression operatorExpression primaryExpression functionExpression +dataType rowCommand fields field @@ -279,4 +282,4 @@ enrichWithClause atn: -[4, 1, 109, 530, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 116, 8, 1, 10, 1, 12, 1, 119, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 126, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 141, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 153, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 160, 8, 5, 10, 5, 12, 5, 163, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 3, 5, 174, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 196, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 208, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 214, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 222, 8, 8, 10, 8, 12, 8, 225, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 234, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 242, 8, 10, 10, 10, 12, 10, 245, 9, 10, 3, 10, 247, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 257, 8, 12, 10, 12, 12, 12, 260, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 267, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 273, 8, 14, 10, 14, 12, 14, 276, 9, 14, 1, 14, 3, 14, 279, 8, 14, 1, 14, 3, 14, 282, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 290, 8, 16, 10, 16, 12, 16, 293, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 3, 18, 301, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 307, 8, 19, 10, 19, 12, 19, 310, 9, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 321, 8, 22, 1, 22, 1, 22, 3, 22, 325, 8, 22, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 331, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 366, 8, 28, 10, 28, 12, 28, 369, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 377, 8, 28, 10, 28, 12, 28, 380, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 388, 8, 28, 10, 28, 12, 28, 391, 9, 28, 1, 28, 1, 28, 3, 28, 395, 8, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 3, 31, 411, 8, 31, 1, 31, 1, 31, 3, 31, 415, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 421, 8, 32, 10, 32, 12, 32, 424, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 430, 8, 33, 10, 33, 12, 33, 433, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 
1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 452, 8, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 5, 39, 464, 8, 39, 10, 39, 12, 39, 467, 9, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 3, 42, 477, 8, 42, 1, 43, 3, 43, 480, 8, 43, 1, 43, 1, 43, 1, 44, 3, 44, 485, 8, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 510, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 516, 8, 51, 10, 51, 12, 51, 519, 9, 51, 3, 51, 521, 8, 51, 1, 52, 1, 52, 1, 52, 3, 52, 526, 8, 52, 1, 52, 1, 52, 1, 52, 0, 3, 2, 10, 16, 53, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 73, 73, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 555, 0, 106, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 4, 125, 1, 0, 0, 0, 6, 140, 1, 0, 0, 0, 8, 142, 1, 0, 0, 0, 10, 173, 1, 0, 0, 0, 12, 200, 1, 0, 0, 0, 14, 207, 1, 0, 0, 0, 16, 213, 1, 0, 0, 0, 18, 233, 1, 0, 0, 0, 20, 235, 1, 0, 0, 0, 22, 250, 1, 0, 0, 0, 24, 253, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 268, 1, 0, 0, 0, 30, 283, 1, 0, 0, 0, 32, 285, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 300, 1, 0, 0, 0, 38, 302, 1, 0, 0, 0, 40, 311, 1, 0, 0, 0, 42, 315, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 332, 1, 0, 0, 0, 50, 340, 1, 0, 0, 0, 52, 348, 1, 0, 0, 0, 54, 350, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 396, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 425, 1, 0, 0, 0, 68, 434, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 472, 1, 0, 0, 0, 84, 476, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 484, 1, 0, 0, 0, 90, 488, 1, 0, 0, 0, 92, 490, 1, 0, 0, 0, 94, 492, 1, 0, 0, 0, 96, 495, 1, 0, 0, 0, 98, 499, 1, 0, 0, 0, 100, 502, 1, 0, 0, 0, 102, 505, 1, 0, 0, 0, 104, 525, 1, 0, 0, 0, 106, 107, 3, 2, 1, 0, 107, 108, 5, 0, 0, 1, 108, 1, 1, 0, 0, 0, 109, 110, 6, 1, -1, 0, 110, 111, 3, 4, 2, 0, 111, 117, 1, 0, 0, 0, 112, 113, 10, 1, 0, 0, 113, 114, 5, 26, 0, 0, 114, 116, 3, 6, 3, 0, 115, 112, 1, 0, 0, 0, 116, 119, 1, 0, 0, 0, 117, 115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 3, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 120, 126, 3, 94, 47, 0, 121, 126, 3, 28, 14, 0, 122, 126, 3, 22, 11, 0, 123, 126, 3, 98, 49, 0, 124, 126, 3, 100, 50, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 5, 1, 0, 0, 0, 127, 141, 3, 42, 21, 0, 128, 141, 3, 46, 23, 0, 129, 141, 3, 58, 29, 0, 130, 141, 3, 64, 32, 0, 131, 141, 3, 60, 30, 0, 132, 141, 3, 44, 22, 0, 133, 141, 3, 8, 4, 0, 134, 141, 3, 66, 33, 0, 135, 141, 3, 68, 34, 0, 136, 141, 3, 72, 36, 0, 137, 141, 3, 74, 37, 0, 138, 141, 3, 102, 51, 0, 139, 141, 3, 76, 38, 0, 140, 127, 1, 0, 0, 0, 140, 128, 1, 0, 0, 0, 140, 129, 1, 0, 0, 0, 140, 130, 1, 0, 0, 0, 140, 131, 1, 0, 0, 0, 140, 132, 1, 0, 0, 0, 140, 133, 1, 0, 0, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 140, 139, 1, 0, 0, 0, 141, 7, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 10, 5, 0, 144, 9, 1, 0, 0, 0, 145, 146, 6, 5, -1, 0, 146, 147, 5, 44, 0, 0, 147, 174, 3, 10, 5, 7, 148, 174, 3, 14, 7, 0, 149, 174, 3, 12, 6, 0, 150, 152, 3, 14, 7, 0, 151, 153, 5, 44, 
[... remainder of the previous single-line serialized ATN (machine-generated ANTLR state table) ...]
\ No newline at end of file
+[... regenerated single-line serialized ATN (machine-generated ANTLR state table, renumbered for the updated grammar) ...]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
index 96ce4569fdd8e..1f9c13c16cdd4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
@@ -22,47 +22,47 @@ public class EsqlBaseParser extends Parser {
     STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21,
     WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25,
     PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30,
-    AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38,
-    LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45,
NULLS=46, OR=47, - PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, - GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, - CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, - EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, OPTIONS=71, METADATA=72, FROM_UNQUOTED_IDENTIFIER=73, - FROM_LINE_COMMENT=74, FROM_MULTILINE_COMMENT=75, FROM_WS=76, ID_PATTERN=77, - PROJECT_LINE_COMMENT=78, PROJECT_MULTILINE_COMMENT=79, PROJECT_WS=80, - AS=81, RENAME_LINE_COMMENT=82, RENAME_MULTILINE_COMMENT=83, RENAME_WS=84, - ON=85, WITH=86, ENRICH_POLICY_NAME=87, ENRICH_LINE_COMMENT=88, ENRICH_MULTILINE_COMMENT=89, - ENRICH_WS=90, ENRICH_FIELD_LINE_COMMENT=91, ENRICH_FIELD_MULTILINE_COMMENT=92, - ENRICH_FIELD_WS=93, MVEXPAND_LINE_COMMENT=94, MVEXPAND_MULTILINE_COMMENT=95, - MVEXPAND_WS=96, INFO=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, - SHOW_WS=100, FUNCTIONS=101, META_LINE_COMMENT=102, META_MULTILINE_COMMENT=103, - META_WS=104, COLON=105, SETTING=106, SETTING_LINE_COMMENT=107, SETTTING_MULTILINE_COMMENT=108, - SETTING_WS=109; + AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, + FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, + OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, + LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, + METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, + FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, + PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, + RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, + ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, + ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, + MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, + SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, + META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, + SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, - RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12, - RULE_field = 13, RULE_fromCommand = 14, RULE_fromIdentifier = 15, RULE_fromOptions = 16, - RULE_configOption = 17, RULE_metadata = 18, RULE_metadataOption = 19, - RULE_deprecated_metadata = 20, RULE_evalCommand = 21, RULE_statsCommand = 22, - RULE_inlinestatsCommand = 23, RULE_qualifiedName = 24, RULE_qualifiedNamePattern = 25, - RULE_identifier = 26, RULE_identifierPattern = 27, RULE_constant = 28, - RULE_limitCommand = 29, RULE_sortCommand = 30, RULE_orderExpression = 31, - RULE_keepCommand = 32, RULE_dropCommand = 33, RULE_renameCommand = 34, - RULE_renameClause = 35, RULE_dissectCommand = 36, RULE_grokCommand = 37, - RULE_mvExpandCommand = 38, RULE_commandOptions = 39, RULE_commandOption = 40, - RULE_booleanValue = 41, RULE_numericValue = 42, RULE_decimalValue = 43, - RULE_integerValue = 
44, RULE_string = 45, RULE_comparisonOperator = 46, - RULE_explainCommand = 47, RULE_subqueryExpression = 48, RULE_showCommand = 49, - RULE_metaCommand = 50, RULE_enrichCommand = 51, RULE_enrichWithClause = 52; + RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, + RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_fromIdentifier = 16, + RULE_fromOptions = 17, RULE_configOption = 18, RULE_metadata = 19, RULE_metadataOption = 20, + RULE_deprecated_metadata = 21, RULE_evalCommand = 22, RULE_statsCommand = 23, + RULE_inlinestatsCommand = 24, RULE_qualifiedName = 25, RULE_qualifiedNamePattern = 26, + RULE_identifier = 27, RULE_identifierPattern = 28, RULE_constant = 29, + RULE_limitCommand = 30, RULE_sortCommand = 31, RULE_orderExpression = 32, + RULE_keepCommand = 33, RULE_dropCommand = 34, RULE_renameCommand = 35, + RULE_renameClause = 36, RULE_dissectCommand = 37, RULE_grokCommand = 38, + RULE_mvExpandCommand = 39, RULE_commandOptions = 40, RULE_commandOption = 41, + RULE_booleanValue = 42, RULE_numericValue = 43, RULE_decimalValue = 44, + RULE_integerValue = 45, RULE_string = 46, RULE_comparisonOperator = 47, + RULE_explainCommand = 48, RULE_subqueryExpression = 49, RULE_showCommand = 50, + RULE_metaCommand = 51, RULE_enrichCommand = 52, RULE_enrichWithClause = 53; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", - "primaryExpression", "functionExpression", "rowCommand", "fields", "field", - "fromCommand", "fromIdentifier", "fromOptions", "configOption", "metadata", - "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", + "primaryExpression", "functionExpression", "dataType", "rowCommand", + "fields", "field", "fromCommand", "fromIdentifier", "fromOptions", "configOption", + "metadata", "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "identifier", "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", @@ -80,7 +80,7 @@ private static String[] makeLiteralNames() { "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", @@ -98,10 +98,10 @@ private static String[] makeSymbolicNames() { "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", - "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "AND", "ASC", "ASSIGN", "CAST_OP", 
"COMMA", "DESC", "DOT", "FALSE", "FIRST", + "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", + "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", @@ -199,9 +199,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(106); + setState(108); query(0); - setState(107); + setState(109); match(EOF); } } @@ -297,11 +297,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(110); + setState(112); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(117); + setState(119); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -312,16 +312,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(112); + setState(114); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(113); + setState(115); match(PIPE); - setState(114); + setState(116); processingCommand(); } } } - setState(119); + setState(121); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -379,41 +379,41 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(125); + setState(127); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(120); + setState(122); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(121); + setState(123); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(122); + setState(124); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(123); + setState(125); showCommand(); } break; case META: enterOuterAlt(_localctx, 5); { - setState(124); + setState(126); metaCommand(); } break; @@ -497,97 +497,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(140); + setState(142); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(127); + setState(129); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(128); + setState(130); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(129); + setState(131); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(130); + setState(132); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(131); + setState(133); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(132); + setState(134); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - 
setState(133); + setState(135); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(134); + setState(136); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(135); + setState(137); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(136); + setState(138); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(137); + setState(139); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(138); + setState(140); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(139); + setState(141); mvExpandCommand(); } break; @@ -638,9 +638,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(142); + setState(144); match(WHERE); - setState(143); + setState(145); booleanExpression(0); } } @@ -835,7 +835,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(173); + setState(175); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -844,9 +844,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(146); + setState(148); match(NOT); - setState(147); + setState(149); booleanExpression(7); } break; @@ -855,7 +855,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(148); + setState(150); valueExpression(); } break; @@ -864,7 +864,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(149); + setState(151); regexBooleanExpression(); } break; @@ -873,41 +873,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(150); - valueExpression(); setState(152); + valueExpression(); + setState(154); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(151); + setState(153); match(NOT); } } - setState(154); + setState(156); match(IN); - setState(155); + setState(157); match(LP); - setState(156); + setState(158); valueExpression(); - setState(161); + setState(163); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(157); + setState(159); match(COMMA); - setState(158); + setState(160); valueExpression(); } } - setState(163); + setState(165); _errHandler.sync(this); _la = _input.LA(1); } - setState(164); + setState(166); match(RP); } break; @@ -916,27 +916,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(166); + setState(168); valueExpression(); - setState(167); - match(IS); setState(169); + match(IS); + setState(171); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(168); + setState(170); match(NOT); } } - setState(171); + setState(173); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(183); + setState(185); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -944,7 
+944,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(181); + setState(183); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -952,11 +952,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(175); + setState(177); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(176); + setState(178); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(177); + setState(179); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -965,18 +965,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(178); + setState(180); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(179); + setState(181); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(180); + setState(182); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(185); + setState(187); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1031,48 +1031,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(200); + setState(202); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(186); - valueExpression(); setState(188); + valueExpression(); + setState(190); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(187); + setState(189); match(NOT); } } - setState(190); + setState(192); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(191); + setState(193); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(193); - valueExpression(); setState(195); + valueExpression(); + setState(197); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(194); + setState(196); match(NOT); } } - setState(197); + setState(199); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(198); + setState(200); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1158,14 +1158,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(207); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(202); + setState(204); operatorExpression(0); } break; @@ -1173,11 +1173,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new 
ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(203); + setState(205); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(204); + setState(206); comparisonOperator(); - setState(205); + setState(207); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1302,7 +1302,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(213); + setState(215); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1311,8 +1311,8 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(210); - primaryExpression(); + setState(212); + primaryExpression(0); } break; case 2: @@ -1320,7 +1320,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(211); + setState(213); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1331,13 +1331,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(212); + setState(214); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(223); + setState(225); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1345,7 +1345,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(221); + setState(223); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1353,12 +1353,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(215); + setState(217); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(216); + setState(218); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { + if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1366,7 +1366,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(217); + setState(219); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1375,9 +1375,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(218); + setState(220); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(219); + setState(221); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); 
_la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1388,14 +1388,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(220); + setState(222); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(225); + setState(227); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1448,6 +1448,31 @@ public T accept(ParseTreeVisitor visitor) { } } @SuppressWarnings("CheckReturnValue") + public static class InlineCastContext extends PrimaryExpressionContext { + public PrimaryExpressionContext primaryExpression() { + return getRuleContext(PrimaryExpressionContext.class,0); + } + public TerminalNode CAST_OP() { return getToken(EsqlBaseParser.CAST_OP, 0); } + public DataTypeContext dataType() { + return getRuleContext(DataTypeContext.class,0); + } + @SuppressWarnings("this-escape") + public InlineCastContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInlineCast(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitInlineCast(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitInlineCast(this); + else return visitor.visitChildren(this); + } + } + @SuppressWarnings("CheckReturnValue") public static class ConstantDefaultContext extends PrimaryExpressionContext { public ConstantContext constant() { return getRuleContext(ConstantContext.class,0); @@ -1514,49 +1539,91 @@ public T accept(ParseTreeVisitor visitor) { } public final PrimaryExpressionContext primaryExpression() throws RecognitionException { - PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_primaryExpression); + return primaryExpression(0); + } + + private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); + PrimaryExpressionContext _prevctx = _localctx; + int _startState = 18; + enterRecursionRule(_localctx, 18, RULE_primaryExpression, _p); try { - setState(233); + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(236); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: - _localctx = new ConstantDefaultContext(_localctx); - enterOuterAlt(_localctx, 1); { - setState(226); + _localctx = new ConstantDefaultContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(229); constant(); } break; case 2: - _localctx = new DereferenceContext(_localctx); - enterOuterAlt(_localctx, 2); { - setState(227); + _localctx = new DereferenceContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(230); qualifiedName(); } break; case 3: - _localctx = new FunctionContext(_localctx); - enterOuterAlt(_localctx, 3); { - setState(228); + _localctx = new FunctionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(231); functionExpression(); } break; case 4: - _localctx = new ParenthesizedExpressionContext(_localctx); - enterOuterAlt(_localctx, 4); { - 
setState(229); + _localctx = new ParenthesizedExpressionContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(232); match(LP); - setState(230); + setState(233); booleanExpression(0); - setState(231); + setState(234); match(RP); } break; } + _ctx.stop = _input.LT(-1); + setState(243); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,17,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + { + _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); + setState(238); + if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); + setState(239); + match(CAST_OP); + setState(240); + dataType(); + } + } + } + setState(245); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,17,_ctx); + } + } } catch (RecognitionException re) { _localctx.exception = re; @@ -1564,7 +1631,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _errHandler.recover(this, re); } finally { - exitRule(); + unrollRecursionContexts(_parentctx); } return _localctx; } @@ -1614,16 +1681,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(235); + setState(246); identifier(); - setState(236); + setState(247); match(LP); - setState(246); + setState(257); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(237); + setState(248); match(ASTERISK); } break; @@ -1643,21 +1710,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(238); + setState(249); booleanExpression(0); - setState(243); + setState(254); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(239); + setState(250); match(COMMA); - setState(240); + setState(251); booleanExpression(0); } } - setState(245); + setState(256); _errHandler.sync(this); _la = _input.LA(1); } @@ -1669,7 +1736,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(248); + setState(259); match(RP); } } @@ -1684,6 +1751,64 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class DataTypeContext extends ParserRuleContext { + @SuppressWarnings("this-escape") + public DataTypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_dataType; } + + @SuppressWarnings("this-escape") + public DataTypeContext() { } + public void copyFrom(DataTypeContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class ToDataTypeContext extends DataTypeContext { + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + @SuppressWarnings("this-escape") + public ToDataTypeContext(DataTypeContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterToDataType(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) 
((EsqlBaseParserListener)listener).exitToDataType(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitToDataType(this); + else return visitor.visitChildren(this); + } + } + + public final DataTypeContext dataType() throws RecognitionException { + DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_dataType); + try { + _localctx = new ToDataTypeContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(261); + identifier(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class RowCommandContext extends ParserRuleContext { public TerminalNode ROW() { return getToken(EsqlBaseParser.ROW, 0); } @@ -1712,13 +1837,13 @@ public T accept(ParseTreeVisitor visitor) { public final RowCommandContext rowCommand() throws RecognitionException { RowCommandContext _localctx = new RowCommandContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_rowCommand); + enterRule(_localctx, 24, RULE_rowCommand); try { enterOuterAlt(_localctx, 1); { - setState(250); + setState(263); match(ROW); - setState(251); + setState(264); fields(); } } @@ -1767,30 +1892,30 @@ public T accept(ParseTreeVisitor visitor) { public final FieldsContext fields() throws RecognitionException { FieldsContext _localctx = new FieldsContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_fields); + enterRule(_localctx, 26, RULE_fields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(253); + setState(266); field(); - setState(258); + setState(271); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,19,_ctx); + _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(254); + setState(267); match(COMMA); - setState(255); + setState(268); field(); } } } - setState(260); + setState(273); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,19,_ctx); + _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } } } @@ -1836,26 +1961,26 @@ public T accept(ParseTreeVisitor visitor) { public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_field); + enterRule(_localctx, 28, RULE_field); try { - setState(266); + setState(279); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(261); + setState(274); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(262); + setState(275); qualifiedName(); - setState(263); + setState(276); match(ASSIGN); - setState(264); + setState(277); booleanExpression(0); } break; @@ -1913,49 +2038,49 @@ public T accept(ParseTreeVisitor visitor) { public final FromCommandContext fromCommand() throws RecognitionException { FromCommandContext _localctx = new FromCommandContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_fromCommand); + enterRule(_localctx, 30, RULE_fromCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(268); + setState(281); match(FROM); - setState(269); + setState(282); fromIdentifier(); - 
setState(274); + setState(287); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(270); + setState(283); match(COMMA); - setState(271); + setState(284); fromIdentifier(); } } } - setState(276); + setState(289); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(278); + setState(291); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(277); + setState(290); metadata(); } break; } - setState(281); + setState(294); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(280); + setState(293); fromOptions(); } break; @@ -1999,12 +2124,12 @@ public T accept(ParseTreeVisitor visitor) { public final FromIdentifierContext fromIdentifier() throws RecognitionException { FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_fromIdentifier); + enterRule(_localctx, 32, RULE_fromIdentifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(283); + setState(296); _la = _input.LA(1); if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2062,32 +2187,32 @@ public T accept(ParseTreeVisitor visitor) { public final FromOptionsContext fromOptions() throws RecognitionException { FromOptionsContext _localctx = new FromOptionsContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fromOptions); + enterRule(_localctx, 34, RULE_fromOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(285); + setState(298); match(OPTIONS); - setState(286); + setState(299); configOption(); - setState(291); + setState(304); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(287); + setState(300); match(COMMA); - setState(288); + setState(301); configOption(); } } } - setState(293); + setState(306); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } } } @@ -2133,15 +2258,15 @@ public T accept(ParseTreeVisitor visitor) { public final ConfigOptionContext configOption() throws RecognitionException { ConfigOptionContext _localctx = new ConfigOptionContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_configOption); + enterRule(_localctx, 36, RULE_configOption); try { enterOuterAlt(_localctx, 1); { - setState(294); + setState(307); string(); - setState(295); + setState(308); match(ASSIGN); - setState(296); + setState(309); string(); } } @@ -2186,22 +2311,22 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_metadata); + enterRule(_localctx, 38, RULE_metadata); try { - setState(300); + setState(313); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(298); + 
setState(311); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(299); + setState(312); deprecated_metadata(); } break; @@ -2255,32 +2380,32 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataOptionContext metadataOption() throws RecognitionException { MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_metadataOption); + enterRule(_localctx, 40, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(302); + setState(315); match(METADATA); - setState(303); + setState(316); fromIdentifier(); - setState(308); + setState(321); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(304); + setState(317); match(COMMA); - setState(305); + setState(318); fromIdentifier(); } } } - setState(310); + setState(323); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } } } @@ -2323,15 +2448,15 @@ public T accept(ParseTreeVisitor visitor) { public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_deprecated_metadata); + enterRule(_localctx, 42, RULE_deprecated_metadata); try { enterOuterAlt(_localctx, 1); { - setState(311); + setState(324); match(OPENING_BRACKET); - setState(312); + setState(325); metadataOption(); - setState(313); + setState(326); match(CLOSING_BRACKET); } } @@ -2374,13 +2499,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_evalCommand); + enterRule(_localctx, 44, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(315); + setState(328); match(EVAL); - setState(316); + setState(329); fields(); } } @@ -2429,30 +2554,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_statsCommand); + enterRule(_localctx, 46, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(318); + setState(331); match(STATS); - setState(320); + setState(333); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(319); + setState(332); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(324); + setState(337); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(322); + setState(335); match(BY); - setState(323); + setState(336); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2504,22 +2629,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_inlinestatsCommand); + 
enterRule(_localctx, 48, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(326); + setState(339); match(INLINESTATS); - setState(327); + setState(340); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(330); + setState(343); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(328); + setState(341); match(BY); - setState(329); + setState(342); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2571,30 +2696,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_qualifiedName); + enterRule(_localctx, 50, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(332); + setState(345); identifier(); - setState(337); + setState(350); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,30,_ctx); + _alt = getInterpreter().adaptivePredict(_input,31,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(333); + setState(346); match(DOT); - setState(334); + setState(347); identifier(); } } } - setState(339); + setState(352); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,30,_ctx); + _alt = getInterpreter().adaptivePredict(_input,31,_ctx); } } } @@ -2643,30 +2768,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_qualifiedNamePattern); + enterRule(_localctx, 52, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(340); + setState(353); identifierPattern(); - setState(345); + setState(358); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(341); + setState(354); match(DOT); - setState(342); + setState(355); identifierPattern(); } } } - setState(347); + setState(360); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } } } @@ -2707,12 +2832,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_identifier); + enterRule(_localctx, 54, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(348); + setState(361); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2760,11 +2885,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_identifierPattern); + enterRule(_localctx, 56, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(350); + setState(363); match(ID_PATTERN); } } @@ -3030,17 +3155,17 @@ public T 
accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_constant); + enterRule(_localctx, 58, RULE_constant); int _la; try { - setState(394); + setState(407); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(352); + setState(365); match(NULL); } break; @@ -3048,9 +3173,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(353); + setState(366); integerValue(); - setState(354); + setState(367); match(UNQUOTED_IDENTIFIER); } break; @@ -3058,7 +3183,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(356); + setState(369); decimalValue(); } break; @@ -3066,7 +3191,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(357); + setState(370); integerValue(); } break; @@ -3074,7 +3199,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(358); + setState(371); booleanValue(); } break; @@ -3082,7 +3207,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(359); + setState(372); match(PARAM); } break; @@ -3090,7 +3215,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(360); + setState(373); string(); } break; @@ -3098,27 +3223,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(361); + setState(374); match(OPENING_BRACKET); - setState(362); + setState(375); numericValue(); - setState(367); + setState(380); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(363); + setState(376); match(COMMA); - setState(364); + setState(377); numericValue(); } } - setState(369); + setState(382); _errHandler.sync(this); _la = _input.LA(1); } - setState(370); + setState(383); match(CLOSING_BRACKET); } break; @@ -3126,27 +3251,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(372); + setState(385); match(OPENING_BRACKET); - setState(373); + setState(386); booleanValue(); - setState(378); + setState(391); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(374); + setState(387); match(COMMA); - setState(375); + setState(388); booleanValue(); } } - setState(380); + setState(393); _errHandler.sync(this); _la = _input.LA(1); } - setState(381); + setState(394); match(CLOSING_BRACKET); } break; @@ -3154,27 +3279,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(383); + setState(396); 
match(OPENING_BRACKET); - setState(384); + setState(397); string(); - setState(389); + setState(402); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(385); + setState(398); match(COMMA); - setState(386); + setState(399); string(); } } - setState(391); + setState(404); _errHandler.sync(this); _la = _input.LA(1); } - setState(392); + setState(405); match(CLOSING_BRACKET); } break; @@ -3217,13 +3342,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_limitCommand); + enterRule(_localctx, 60, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(396); + setState(409); match(LIMIT); - setState(397); + setState(410); match(INTEGER_LITERAL); } } @@ -3273,32 +3398,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_sortCommand); + enterRule(_localctx, 62, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(399); + setState(412); match(SORT); - setState(400); + setState(413); orderExpression(); - setState(405); + setState(418); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(401); + setState(414); match(COMMA); - setState(402); + setState(415); orderExpression(); } } } - setState(407); + setState(420); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } } @@ -3347,19 +3472,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_orderExpression); + enterRule(_localctx, 64, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(408); + setState(421); booleanExpression(0); - setState(410); + setState(423); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: { - setState(409); + setState(422); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3373,14 +3498,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(414); + setState(427); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: { - setState(412); + setState(425); match(NULLS); - setState(413); + setState(426); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3442,32 +3567,32 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_keepCommand); + enterRule(_localctx, 66, RULE_keepCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - 
setState(416); + setState(429); match(KEEP); - setState(417); + setState(430); qualifiedNamePattern(); - setState(422); + setState(435); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(418); + setState(431); match(COMMA); - setState(419); + setState(432); qualifiedNamePattern(); } } } - setState(424); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3517,32 +3642,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_dropCommand); + enterRule(_localctx, 68, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(425); + setState(438); match(DROP); - setState(426); + setState(439); qualifiedNamePattern(); - setState(431); + setState(444); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(427); + setState(440); match(COMMA); - setState(428); + setState(441); qualifiedNamePattern(); } } } - setState(433); + setState(446); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } } } @@ -3592,32 +3717,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_renameCommand); + enterRule(_localctx, 70, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(434); + setState(447); match(RENAME); - setState(435); + setState(448); renameClause(); - setState(440); + setState(453); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,42,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(436); + setState(449); match(COMMA); - setState(437); + setState(450); renameClause(); } } } - setState(442); + setState(455); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,42,_ctx); } } } @@ -3665,15 +3790,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_renameClause); + enterRule(_localctx, 72, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(443); + setState(456); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(444); + setState(457); match(AS); - setState(445); + setState(458); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3722,22 +3847,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, 
getState()); - enterRule(_localctx, 72, RULE_dissectCommand); + enterRule(_localctx, 74, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(447); + setState(460); match(DISSECT); - setState(448); - primaryExpression(); - setState(449); + setState(461); + primaryExpression(0); + setState(462); string(); - setState(451); + setState(464); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: { - setState(450); + setState(463); commandOptions(); } break; @@ -3786,15 +3911,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_grokCommand); + enterRule(_localctx, 76, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(453); + setState(466); match(GROK); - setState(454); - primaryExpression(); - setState(455); + setState(467); + primaryExpression(0); + setState(468); string(); } } @@ -3837,13 +3962,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_mvExpandCommand); + enterRule(_localctx, 78, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(470); match(MV_EXPAND); - setState(458); + setState(471); qualifiedName(); } } @@ -3892,30 +4017,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_commandOptions); + enterRule(_localctx, 80, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(460); + setState(473); commandOption(); - setState(465); + setState(478); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(461); + setState(474); match(COMMA); - setState(462); + setState(475); commandOption(); } } } - setState(467); + setState(480); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -3961,15 +4086,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_commandOption); + enterRule(_localctx, 82, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(468); + setState(481); identifier(); - setState(469); + setState(482); match(ASSIGN); - setState(470); + setState(483); constant(); } } @@ -4010,12 +4135,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_booleanValue); + enterRule(_localctx, 84, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(472); + setState(485); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); 
@@ -4068,22 +4193,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_numericValue); + enterRule(_localctx, 86, RULE_numericValue); try { - setState(476); + setState(489); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(474); + setState(487); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(475); + setState(488); integerValue(); } break; @@ -4127,17 +4252,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_decimalValue); + enterRule(_localctx, 88, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(479); + setState(492); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(478); + setState(491); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4150,7 +4275,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(481); + setState(494); match(DECIMAL_LITERAL); } } @@ -4192,17 +4317,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_integerValue); + enterRule(_localctx, 90, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(484); + setState(497); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(483); + setState(496); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4215,7 +4340,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(486); + setState(499); match(INTEGER_LITERAL); } } @@ -4255,11 +4380,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_string); + enterRule(_localctx, 92, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(488); + setState(501); match(QUOTED_STRING); } } @@ -4304,14 +4429,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_comparisonOperator); + enterRule(_localctx, 94, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(490); + setState(503); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4360,13 +4485,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_explainCommand); + 
enterRule(_localctx, 96, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(492); + setState(505); match(EXPLAIN); - setState(493); + setState(506); subqueryExpression(); } } @@ -4410,15 +4535,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_subqueryExpression); + enterRule(_localctx, 98, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(508); match(OPENING_BRACKET); - setState(496); + setState(509); query(0); - setState(497); + setState(510); match(CLOSING_BRACKET); } } @@ -4470,14 +4595,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_showCommand); + enterRule(_localctx, 100, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(499); + setState(512); match(SHOW); - setState(500); + setState(513); match(INFO); } } @@ -4529,14 +4654,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_metaCommand); + enterRule(_localctx, 102, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(502); + setState(515); match(META); - setState(503); + setState(516); match(FUNCTIONS); } } @@ -4594,53 +4719,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_enrichCommand); + enterRule(_localctx, 104, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(505); + setState(518); match(ENRICH); - setState(506); + setState(519); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(509); + setState(522); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(507); + setState(520); match(ON); - setState(508); + setState(521); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(520); + setState(533); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(511); + setState(524); match(WITH); - setState(512); + setState(525); enrichWithClause(); - setState(517); + setState(530); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(513); + setState(526); match(COMMA); - setState(514); + setState(527); enrichWithClause(); } } } - setState(519); + setState(532); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } } break; @@ -4691,23 +4816,23 @@ public T accept(ParseTreeVisitor visitor) { 
public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_enrichWithClause); + enterRule(_localctx, 106, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(525); + setState(538); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(522); + setState(535); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(523); + setState(536); match(ASSIGN); } break; } - setState(527); + setState(540); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4730,6 +4855,8 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); case 8: return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); + case 9: + return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; } @@ -4758,9 +4885,16 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } return true; } + private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { + switch (predIndex) { + case 5: + return precpred(_ctx, 1); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001m\u0212\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001n\u021f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4774,329 +4908,337 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0005\u0001t\b\u0001\n\u0001\f\u0001w\t\u0001\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002~\b\u0002\u0001\u0003"+ + "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0005\u0001v\b\u0001\n\u0001\f\u0001y\t\u0001\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0080\b\u0002"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0003\u0003\u008d\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ + "\u0001\u0003\u0003\u0003\u008f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00a2\b\u0005\n\u0005\f\u0005\u00a5"+ + "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00ac\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b0\b\u0005"+ 
"\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0003\u0005\u0099\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0005\u0005\u00a0\b\u0005\n\u0005\f\u0005\u00a3\t\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00aa"+ - "\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ae\b\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ - "\u00b6\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00bd\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00c4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00c9\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0003\u0007\u00d0\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ - "\b\u00d6\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00de"+ - "\b\b\n\b\f\b\u00e1\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0003\t\u00ea\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0005\n\u00f2\b\n\n\n\f\n\u00f5\t\n\u0003\n\u00f7\b\n\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f"+ - "\u0101\b\f\n\f\f\f\u0104\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003"+ - "\r\u010b\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+ - "\u0111\b\u000e\n\u000e\f\u000e\u0114\t\u000e\u0001\u000e\u0003\u000e\u0117"+ - "\b\u000e\u0001\u000e\u0003\u000e\u011a\b\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u0122\b\u0010"+ - "\n\u0010\f\u0010\u0125\t\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0012\u0001\u0012\u0003\u0012\u012d\b\u0012\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0133\b\u0013\n\u0013\f\u0013"+ - "\u0136\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0003\u0016\u0141\b\u0016"+ - "\u0001\u0016\u0001\u0016\u0003\u0016\u0145\b\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0003\u0017\u014b\b\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0005\u0018\u0150\b\u0018\n\u0018\f\u0018\u0153\t\u0018\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0158\b\u0019\n\u0019\f\u0019"+ - "\u015b\t\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u016e\b\u001c\n\u001c\f\u001c\u0171\t\u001c\u0001\u001c\u0001"+ - "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0179"+ - "\b\u001c\n\u001c\f\u001c\u017c\t\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0184\b\u001c\n\u001c"+ - "\f\u001c\u0187\t\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u018b\b\u001c"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e"+ - "\u0001\u001e\u0005\u001e\u0194\b\u001e\n\u001e\f\u001e\u0197\t\u001e\u0001"+ - "\u001f\u0001\u001f\u0003\u001f\u019b\b\u001f\u0001\u001f\u0001\u001f\u0003"+ - "\u001f\u019f\b\u001f\u0001 \u0001 \u0001 \u0001 \u0005 \u01a5\b \n \f"+ - " \u01a8\t \u0001!\u0001!\u0001!\u0001!\u0005!\u01ae\b!\n!\f!\u01b1\t!"+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b7\b\"\n\"\f\"\u01ba\t\"\u0001"+ - "#\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0003$\u01c4\b$\u0001"+ - 
"%\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0005"+ - "\'\u01d0\b\'\n\'\f\'\u01d3\t\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001"+ - ")\u0001*\u0001*\u0003*\u01dd\b*\u0001+\u0003+\u01e0\b+\u0001+\u0001+\u0001"+ - ",\u0003,\u01e5\b,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001/\u0001"+ - "/\u0001/\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00012\u0001"+ - "2\u00012\u00013\u00013\u00013\u00013\u00033\u01fe\b3\u00013\u00013\u0001"+ - "3\u00013\u00053\u0204\b3\n3\f3\u0207\t3\u00033\u0209\b3\u00014\u00014"+ - "\u00014\u00034\u020e\b4\u00014\u00014\u00014\u0000\u0003\u0002\n\u0010"+ - "5\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh\u0000\b\u0001\u0000"+ - ";<\u0001\u0000=?\u0002\u0000CCII\u0001\u0000BC\u0002\u0000 ##\u0001\u0000"+ - "&\'\u0002\u0000%%33\u0002\u0000446:\u022b\u0000j\u0001\u0000\u0000\u0000"+ - "\u0002m\u0001\u0000\u0000\u0000\u0004}\u0001\u0000\u0000\u0000\u0006\u008c"+ - "\u0001\u0000\u0000\u0000\b\u008e\u0001\u0000\u0000\u0000\n\u00ad\u0001"+ - "\u0000\u0000\u0000\f\u00c8\u0001\u0000\u0000\u0000\u000e\u00cf\u0001\u0000"+ - "\u0000\u0000\u0010\u00d5\u0001\u0000\u0000\u0000\u0012\u00e9\u0001\u0000"+ - "\u0000\u0000\u0014\u00eb\u0001\u0000\u0000\u0000\u0016\u00fa\u0001\u0000"+ - "\u0000\u0000\u0018\u00fd\u0001\u0000\u0000\u0000\u001a\u010a\u0001\u0000"+ - "\u0000\u0000\u001c\u010c\u0001\u0000\u0000\u0000\u001e\u011b\u0001\u0000"+ - "\u0000\u0000 \u011d\u0001\u0000\u0000\u0000\"\u0126\u0001\u0000\u0000"+ - "\u0000$\u012c\u0001\u0000\u0000\u0000&\u012e\u0001\u0000\u0000\u0000("+ - "\u0137\u0001\u0000\u0000\u0000*\u013b\u0001\u0000\u0000\u0000,\u013e\u0001"+ - "\u0000\u0000\u0000.\u0146\u0001\u0000\u0000\u00000\u014c\u0001\u0000\u0000"+ - "\u00002\u0154\u0001\u0000\u0000\u00004\u015c\u0001\u0000\u0000\u00006"+ - "\u015e\u0001\u0000\u0000\u00008\u018a\u0001\u0000\u0000\u0000:\u018c\u0001"+ - "\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0198\u0001\u0000\u0000"+ - "\u0000@\u01a0\u0001\u0000\u0000\u0000B\u01a9\u0001\u0000\u0000\u0000D"+ - "\u01b2\u0001\u0000\u0000\u0000F\u01bb\u0001\u0000\u0000\u0000H\u01bf\u0001"+ - "\u0000\u0000\u0000J\u01c5\u0001\u0000\u0000\u0000L\u01c9\u0001\u0000\u0000"+ - "\u0000N\u01cc\u0001\u0000\u0000\u0000P\u01d4\u0001\u0000\u0000\u0000R"+ - "\u01d8\u0001\u0000\u0000\u0000T\u01dc\u0001\u0000\u0000\u0000V\u01df\u0001"+ - "\u0000\u0000\u0000X\u01e4\u0001\u0000\u0000\u0000Z\u01e8\u0001\u0000\u0000"+ - "\u0000\\\u01ea\u0001\u0000\u0000\u0000^\u01ec\u0001\u0000\u0000\u0000"+ - "`\u01ef\u0001\u0000\u0000\u0000b\u01f3\u0001\u0000\u0000\u0000d\u01f6"+ - "\u0001\u0000\u0000\u0000f\u01f9\u0001\u0000\u0000\u0000h\u020d\u0001\u0000"+ - "\u0000\u0000jk\u0003\u0002\u0001\u0000kl\u0005\u0000\u0000\u0001l\u0001"+ - "\u0001\u0000\u0000\u0000mn\u0006\u0001\uffff\uffff\u0000no\u0003\u0004"+ - "\u0002\u0000ou\u0001\u0000\u0000\u0000pq\n\u0001\u0000\u0000qr\u0005\u001a"+ - "\u0000\u0000rt\u0003\u0006\u0003\u0000sp\u0001\u0000\u0000\u0000tw\u0001"+ - "\u0000\u0000\u0000us\u0001\u0000\u0000\u0000uv\u0001\u0000\u0000\u0000"+ - "v\u0003\u0001\u0000\u0000\u0000wu\u0001\u0000\u0000\u0000x~\u0003^/\u0000"+ - "y~\u0003\u001c\u000e\u0000z~\u0003\u0016\u000b\u0000{~\u0003b1\u0000|"+ - "~\u0003d2\u0000}x\u0001\u0000\u0000\u0000}y\u0001\u0000\u0000\u0000}z"+ - "\u0001\u0000\u0000\u0000}{\u0001\u0000\u0000\u0000}|\u0001\u0000\u0000"+ - "\u0000~\u0005\u0001\u0000\u0000\u0000\u007f\u008d\u0003*\u0015\u0000\u0080"+ - 
"\u008d\u0003.\u0017\u0000\u0081\u008d\u0003:\u001d\u0000\u0082\u008d\u0003"+ - "@ \u0000\u0083\u008d\u0003<\u001e\u0000\u0084\u008d\u0003,\u0016\u0000"+ - "\u0085\u008d\u0003\b\u0004\u0000\u0086\u008d\u0003B!\u0000\u0087\u008d"+ - "\u0003D\"\u0000\u0088\u008d\u0003H$\u0000\u0089\u008d\u0003J%\u0000\u008a"+ - "\u008d\u0003f3\u0000\u008b\u008d\u0003L&\u0000\u008c\u007f\u0001\u0000"+ - "\u0000\u0000\u008c\u0080\u0001\u0000\u0000\u0000\u008c\u0081\u0001\u0000"+ - "\u0000\u0000\u008c\u0082\u0001\u0000\u0000\u0000\u008c\u0083\u0001\u0000"+ - "\u0000\u0000\u008c\u0084\u0001\u0000\u0000\u0000\u008c\u0085\u0001\u0000"+ - "\u0000\u0000\u008c\u0086\u0001\u0000\u0000\u0000\u008c\u0087\u0001\u0000"+ - "\u0000\u0000\u008c\u0088\u0001\u0000\u0000\u0000\u008c\u0089\u0001\u0000"+ - "\u0000\u0000\u008c\u008a\u0001\u0000\u0000\u0000\u008c\u008b\u0001\u0000"+ - "\u0000\u0000\u008d\u0007\u0001\u0000\u0000\u0000\u008e\u008f\u0005\u0012"+ - "\u0000\u0000\u008f\u0090\u0003\n\u0005\u0000\u0090\t\u0001\u0000\u0000"+ - "\u0000\u0091\u0092\u0006\u0005\uffff\uffff\u0000\u0092\u0093\u0005,\u0000"+ - "\u0000\u0093\u00ae\u0003\n\u0005\u0007\u0094\u00ae\u0003\u000e\u0007\u0000"+ - "\u0095\u00ae\u0003\f\u0006\u0000\u0096\u0098\u0003\u000e\u0007\u0000\u0097"+ - "\u0099\u0005,\u0000\u0000\u0098\u0097\u0001\u0000\u0000\u0000\u0098\u0099"+ - "\u0001\u0000\u0000\u0000\u0099\u009a\u0001\u0000\u0000\u0000\u009a\u009b"+ - "\u0005)\u0000\u0000\u009b\u009c\u0005(\u0000\u0000\u009c\u00a1\u0003\u000e"+ - "\u0007\u0000\u009d\u009e\u0005\"\u0000\u0000\u009e\u00a0\u0003\u000e\u0007"+ - "\u0000\u009f\u009d\u0001\u0000\u0000\u0000\u00a0\u00a3\u0001\u0000\u0000"+ - "\u0000\u00a1\u009f\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000\u0000"+ - "\u0000\u00a2\u00a4\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ - "\u0000\u00a4\u00a5\u00052\u0000\u0000\u00a5\u00ae\u0001\u0000\u0000\u0000"+ - "\u00a6\u00a7\u0003\u000e\u0007\u0000\u00a7\u00a9\u0005*\u0000\u0000\u00a8"+ - "\u00aa\u0005,\u0000\u0000\u00a9\u00a8\u0001\u0000\u0000\u0000\u00a9\u00aa"+ - "\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab\u00ac"+ - "\u0005-\u0000\u0000\u00ac\u00ae\u0001\u0000\u0000\u0000\u00ad\u0091\u0001"+ - "\u0000\u0000\u0000\u00ad\u0094\u0001\u0000\u0000\u0000\u00ad\u0095\u0001"+ - "\u0000\u0000\u0000\u00ad\u0096\u0001\u0000\u0000\u0000\u00ad\u00a6\u0001"+ - "\u0000\u0000\u0000\u00ae\u00b7\u0001\u0000\u0000\u0000\u00af\u00b0\n\u0004"+ - "\u0000\u0000\u00b0\u00b1\u0005\u001f\u0000\u0000\u00b1\u00b6\u0003\n\u0005"+ - "\u0005\u00b2\u00b3\n\u0003\u0000\u0000\u00b3\u00b4\u0005/\u0000\u0000"+ - "\u00b4\u00b6\u0003\n\u0005\u0004\u00b5\u00af\u0001\u0000\u0000\u0000\u00b5"+ - "\u00b2\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000\u00b7"+ - "\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8"+ - "\u000b\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba"+ - "\u00bc\u0003\u000e\u0007\u0000\u00bb\u00bd\u0005,\u0000\u0000\u00bc\u00bb"+ - "\u0001\u0000\u0000\u0000\u00bc\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be"+ - "\u0001\u0000\u0000\u0000\u00be\u00bf\u0005+\u0000\u0000\u00bf\u00c0\u0003"+ - "Z-\u0000\u00c0\u00c9\u0001\u0000\u0000\u0000\u00c1\u00c3\u0003\u000e\u0007"+ - "\u0000\u00c2\u00c4\u0005,\u0000\u0000\u00c3\u00c2\u0001\u0000\u0000\u0000"+ - "\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c4\u00c5\u0001\u0000\u0000\u0000"+ - "\u00c5\u00c6\u00051\u0000\u0000\u00c6\u00c7\u0003Z-\u0000\u00c7\u00c9"+ - "\u0001\u0000\u0000\u0000\u00c8\u00ba\u0001\u0000\u0000\u0000\u00c8\u00c1"+ - 
"\u0001\u0000\u0000\u0000\u00c9\r\u0001\u0000\u0000\u0000\u00ca\u00d0\u0003"+ - "\u0010\b\u0000\u00cb\u00cc\u0003\u0010\b\u0000\u00cc\u00cd\u0003\\.\u0000"+ - "\u00cd\u00ce\u0003\u0010\b\u0000\u00ce\u00d0\u0001\u0000\u0000\u0000\u00cf"+ - "\u00ca\u0001\u0000\u0000\u0000\u00cf\u00cb\u0001\u0000\u0000\u0000\u00d0"+ - "\u000f\u0001\u0000\u0000\u0000\u00d1\u00d2\u0006\b\uffff\uffff\u0000\u00d2"+ - "\u00d6\u0003\u0012\t\u0000\u00d3\u00d4\u0007\u0000\u0000\u0000\u00d4\u00d6"+ - "\u0003\u0010\b\u0003\u00d5\u00d1\u0001\u0000\u0000\u0000\u00d5\u00d3\u0001"+ - "\u0000\u0000\u0000\u00d6\u00df\u0001\u0000\u0000\u0000\u00d7\u00d8\n\u0002"+ - "\u0000\u0000\u00d8\u00d9\u0007\u0001\u0000\u0000\u00d9\u00de\u0003\u0010"+ - "\b\u0003\u00da\u00db\n\u0001\u0000\u0000\u00db\u00dc\u0007\u0000\u0000"+ - "\u0000\u00dc\u00de\u0003\u0010\b\u0002\u00dd\u00d7\u0001\u0000\u0000\u0000"+ - "\u00dd\u00da\u0001\u0000\u0000\u0000\u00de\u00e1\u0001\u0000\u0000\u0000"+ - "\u00df\u00dd\u0001\u0000\u0000\u0000\u00df\u00e0\u0001\u0000\u0000\u0000"+ - "\u00e0\u0011\u0001\u0000\u0000\u0000\u00e1\u00df\u0001\u0000\u0000\u0000"+ - "\u00e2\u00ea\u00038\u001c\u0000\u00e3\u00ea\u00030\u0018\u0000\u00e4\u00ea"+ - "\u0003\u0014\n\u0000\u00e5\u00e6\u0005(\u0000\u0000\u00e6\u00e7\u0003"+ - "\n\u0005\u0000\u00e7\u00e8\u00052\u0000\u0000\u00e8\u00ea\u0001\u0000"+ - "\u0000\u0000\u00e9\u00e2\u0001\u0000\u0000\u0000\u00e9\u00e3\u0001\u0000"+ - "\u0000\u0000\u00e9\u00e4\u0001\u0000\u0000\u0000\u00e9\u00e5\u0001\u0000"+ - "\u0000\u0000\u00ea\u0013\u0001\u0000\u0000\u0000\u00eb\u00ec\u00034\u001a"+ - "\u0000\u00ec\u00f6\u0005(\u0000\u0000\u00ed\u00f7\u0005=\u0000\u0000\u00ee"+ - "\u00f3\u0003\n\u0005\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0\u00f2"+ - "\u0003\n\u0005\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00f5\u0001"+ - "\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001"+ - "\u0000\u0000\u0000\u00f4\u00f7\u0001\u0000\u0000\u0000\u00f5\u00f3\u0001"+ - "\u0000\u0000\u0000\u00f6\u00ed\u0001\u0000\u0000\u0000\u00f6\u00ee\u0001"+ - "\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001"+ - "\u0000\u0000\u0000\u00f8\u00f9\u00052\u0000\u0000\u00f9\u0015\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fb\u0005\u000e\u0000\u0000\u00fb\u00fc\u0003\u0018"+ - "\f\u0000\u00fc\u0017\u0001\u0000\u0000\u0000\u00fd\u0102\u0003\u001a\r"+ - "\u0000\u00fe\u00ff\u0005\"\u0000\u0000\u00ff\u0101\u0003\u001a\r\u0000"+ - "\u0100\u00fe\u0001\u0000\u0000\u0000\u0101\u0104\u0001\u0000\u0000\u0000"+ - "\u0102\u0100\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000\u0000"+ - "\u0103\u0019\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000\u0000\u0000"+ - "\u0105\u010b\u0003\n\u0005\u0000\u0106\u0107\u00030\u0018\u0000\u0107"+ - "\u0108\u0005!\u0000\u0000\u0108\u0109\u0003\n\u0005\u0000\u0109\u010b"+ - "\u0001\u0000\u0000\u0000\u010a\u0105\u0001\u0000\u0000\u0000\u010a\u0106"+ - "\u0001\u0000\u0000\u0000\u010b\u001b\u0001\u0000\u0000\u0000\u010c\u010d"+ - "\u0005\u0006\u0000\u0000\u010d\u0112\u0003\u001e\u000f\u0000\u010e\u010f"+ - "\u0005\"\u0000\u0000\u010f\u0111\u0003\u001e\u000f\u0000\u0110\u010e\u0001"+ - "\u0000\u0000\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001"+ - "\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u0116\u0001"+ - "\u0000\u0000\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0117\u0003"+ - "$\u0012\u0000\u0116\u0115\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ - "\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u011a\u0003 \u0010"+ - 
"\u0000\u0119\u0118\u0001\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000"+ - "\u0000\u011a\u001d\u0001\u0000\u0000\u0000\u011b\u011c\u0007\u0002\u0000"+ - "\u0000\u011c\u001f\u0001\u0000\u0000\u0000\u011d\u011e\u0005G\u0000\u0000"+ - "\u011e\u0123\u0003\"\u0011\u0000\u011f\u0120\u0005\"\u0000\u0000\u0120"+ - "\u0122\u0003\"\u0011\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0122\u0125"+ - "\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0123\u0124"+ - "\u0001\u0000\u0000\u0000\u0124!\u0001\u0000\u0000\u0000\u0125\u0123\u0001"+ - "\u0000\u0000\u0000\u0126\u0127\u0003Z-\u0000\u0127\u0128\u0005!\u0000"+ - "\u0000\u0128\u0129\u0003Z-\u0000\u0129#\u0001\u0000\u0000\u0000\u012a"+ - "\u012d\u0003&\u0013\u0000\u012b\u012d\u0003(\u0014\u0000\u012c\u012a\u0001"+ - "\u0000\u0000\u0000\u012c\u012b\u0001\u0000\u0000\u0000\u012d%\u0001\u0000"+ - "\u0000\u0000\u012e\u012f\u0005H\u0000\u0000\u012f\u0134\u0003\u001e\u000f"+ - "\u0000\u0130\u0131\u0005\"\u0000\u0000\u0131\u0133\u0003\u001e\u000f\u0000"+ - "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0136\u0001\u0000\u0000\u0000"+ - "\u0134\u0132\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000"+ - "\u0135\'\u0001\u0000\u0000\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ - "\u0138\u0005@\u0000\u0000\u0138\u0139\u0003&\u0013\u0000\u0139\u013a\u0005"+ - "A\u0000\u0000\u013a)\u0001\u0000\u0000\u0000\u013b\u013c\u0005\u0004\u0000"+ - "\u0000\u013c\u013d\u0003\u0018\f\u0000\u013d+\u0001\u0000\u0000\u0000"+ - "\u013e\u0140\u0005\u0011\u0000\u0000\u013f\u0141\u0003\u0018\f\u0000\u0140"+ - "\u013f\u0001\u0000\u0000\u0000\u0140\u0141\u0001\u0000\u0000\u0000\u0141"+ - "\u0144\u0001\u0000\u0000\u0000\u0142\u0143\u0005\u001e\u0000\u0000\u0143"+ - "\u0145\u0003\u0018\f\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145"+ - "\u0001\u0000\u0000\u0000\u0145-\u0001\u0000\u0000\u0000\u0146\u0147\u0005"+ - "\b\u0000\u0000\u0147\u014a\u0003\u0018\f\u0000\u0148\u0149\u0005\u001e"+ - "\u0000\u0000\u0149\u014b\u0003\u0018\f\u0000\u014a\u0148\u0001\u0000\u0000"+ - "\u0000\u014a\u014b\u0001\u0000\u0000\u0000\u014b/\u0001\u0000\u0000\u0000"+ - "\u014c\u0151\u00034\u001a\u0000\u014d\u014e\u0005$\u0000\u0000\u014e\u0150"+ - "\u00034\u001a\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001"+ - "\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001"+ - "\u0000\u0000\u0000\u01521\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ - "\u0000\u0000\u0154\u0159\u00036\u001b\u0000\u0155\u0156\u0005$\u0000\u0000"+ - "\u0156\u0158\u00036\u001b\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158"+ - "\u015b\u0001\u0000\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159"+ - "\u015a\u0001\u0000\u0000\u0000\u015a3\u0001\u0000\u0000\u0000\u015b\u0159"+ - "\u0001\u0000\u0000\u0000\u015c\u015d\u0007\u0003\u0000\u0000\u015d5\u0001"+ - "\u0000\u0000\u0000\u015e\u015f\u0005M\u0000\u0000\u015f7\u0001\u0000\u0000"+ - "\u0000\u0160\u018b\u0005-\u0000\u0000\u0161\u0162\u0003X,\u0000\u0162"+ - "\u0163\u0005B\u0000\u0000\u0163\u018b\u0001\u0000\u0000\u0000\u0164\u018b"+ - "\u0003V+\u0000\u0165\u018b\u0003X,\u0000\u0166\u018b\u0003R)\u0000\u0167"+ - "\u018b\u00050\u0000\u0000\u0168\u018b\u0003Z-\u0000\u0169\u016a\u0005"+ - "@\u0000\u0000\u016a\u016f\u0003T*\u0000\u016b\u016c\u0005\"\u0000\u0000"+ - "\u016c\u016e\u0003T*\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016e\u0171"+ - "\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u016f\u0170"+ - "\u0001\u0000\u0000\u0000\u0170\u0172\u0001\u0000\u0000\u0000\u0171\u016f"+ - 
"\u0001\u0000\u0000\u0000\u0172\u0173\u0005A\u0000\u0000\u0173\u018b\u0001"+ - "\u0000\u0000\u0000\u0174\u0175\u0005@\u0000\u0000\u0175\u017a\u0003R)"+ - "\u0000\u0176\u0177\u0005\"\u0000\u0000\u0177\u0179\u0003R)\u0000\u0178"+ - "\u0176\u0001\u0000\u0000\u0000\u0179\u017c\u0001\u0000\u0000\u0000\u017a"+ - "\u0178\u0001\u0000\u0000\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b"+ - "\u017d\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d"+ - "\u017e\u0005A\u0000\u0000\u017e\u018b\u0001\u0000\u0000\u0000\u017f\u0180"+ - "\u0005@\u0000\u0000\u0180\u0185\u0003Z-\u0000\u0181\u0182\u0005\"\u0000"+ - "\u0000\u0182\u0184\u0003Z-\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0184"+ - "\u0187\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0185"+ - "\u0186\u0001\u0000\u0000\u0000\u0186\u0188\u0001\u0000\u0000\u0000\u0187"+ - "\u0185\u0001\u0000\u0000\u0000\u0188\u0189\u0005A\u0000\u0000\u0189\u018b"+ - "\u0001\u0000\u0000\u0000\u018a\u0160\u0001\u0000\u0000\u0000\u018a\u0161"+ - "\u0001\u0000\u0000\u0000\u018a\u0164\u0001\u0000\u0000\u0000\u018a\u0165"+ - "\u0001\u0000\u0000\u0000\u018a\u0166\u0001\u0000\u0000\u0000\u018a\u0167"+ - "\u0001\u0000\u0000\u0000\u018a\u0168\u0001\u0000\u0000\u0000\u018a\u0169"+ - "\u0001\u0000\u0000\u0000\u018a\u0174\u0001\u0000\u0000\u0000\u018a\u017f"+ - "\u0001\u0000\u0000\u0000\u018b9\u0001\u0000\u0000\u0000\u018c\u018d\u0005"+ - "\n\u0000\u0000\u018d\u018e\u0005\u001c\u0000\u0000\u018e;\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0005\u0010\u0000\u0000\u0190\u0195\u0003>\u001f"+ - "\u0000\u0191\u0192\u0005\"\u0000\u0000\u0192\u0194\u0003>\u001f\u0000"+ - "\u0193\u0191\u0001\u0000\u0000\u0000\u0194\u0197\u0001\u0000\u0000\u0000"+ - "\u0195\u0193\u0001\u0000\u0000\u0000\u0195\u0196\u0001\u0000\u0000\u0000"+ - "\u0196=\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198"+ - "\u019a\u0003\n\u0005\u0000\u0199\u019b\u0007\u0004\u0000\u0000\u019a\u0199"+ - "\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000\u0000\u0000\u019b\u019e"+ - "\u0001\u0000\u0000\u0000\u019c\u019d\u0005.\u0000\u0000\u019d\u019f\u0007"+ - "\u0005\u0000\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019e\u019f\u0001"+ - "\u0000\u0000\u0000\u019f?\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005\t"+ - "\u0000\u0000\u01a1\u01a6\u00032\u0019\u0000\u01a2\u01a3\u0005\"\u0000"+ - "\u0000\u01a3\u01a5\u00032\u0019\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a8\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8"+ - "\u01a6\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005\u0002\u0000\u0000\u01aa"+ - "\u01af\u00032\u0019\u0000\u01ab\u01ac\u0005\"\u0000\u0000\u01ac\u01ae"+ - "\u00032\u0019\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1\u0001"+ - "\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b0C\u0001\u0000\u0000\u0000\u01b1\u01af\u0001\u0000"+ - "\u0000\u0000\u01b2\u01b3\u0005\r\u0000\u0000\u01b3\u01b8\u0003F#\u0000"+ - "\u01b4\u01b5\u0005\"\u0000\u0000\u01b5\u01b7\u0003F#\u0000\u01b6\u01b4"+ - "\u0001\u0000\u0000\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6"+ - "\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9E\u0001"+ - "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0003"+ - "2\u0019\u0000\u01bc\u01bd\u0005Q\u0000\u0000\u01bd\u01be\u00032\u0019"+ - "\u0000\u01beG\u0001\u0000\u0000\u0000\u01bf\u01c0\u0005\u0001\u0000\u0000"+ - 
"\u01c0\u01c1\u0003\u0012\t\u0000\u01c1\u01c3\u0003Z-\u0000\u01c2\u01c4"+ - "\u0003N\'\u0000\u01c3\u01c2\u0001\u0000\u0000\u0000\u01c3\u01c4\u0001"+ - "\u0000\u0000\u0000\u01c4I\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005\u0007"+ - "\u0000\u0000\u01c6\u01c7\u0003\u0012\t\u0000\u01c7\u01c8\u0003Z-\u0000"+ - "\u01c8K\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\f\u0000\u0000\u01ca"+ - "\u01cb\u00030\u0018\u0000\u01cbM\u0001\u0000\u0000\u0000\u01cc\u01d1\u0003"+ - "P(\u0000\u01cd\u01ce\u0005\"\u0000\u0000\u01ce\u01d0\u0003P(\u0000\u01cf"+ - "\u01cd\u0001\u0000\u0000\u0000\u01d0\u01d3\u0001\u0000\u0000\u0000\u01d1"+ - "\u01cf\u0001\u0000\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2"+ - "O\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01d5"+ - "\u00034\u001a\u0000\u01d5\u01d6\u0005!\u0000\u0000\u01d6\u01d7\u00038"+ - "\u001c\u0000\u01d7Q\u0001\u0000\u0000\u0000\u01d8\u01d9\u0007\u0006\u0000"+ - "\u0000\u01d9S\u0001\u0000\u0000\u0000\u01da\u01dd\u0003V+\u0000\u01db"+ - "\u01dd\u0003X,\u0000\u01dc\u01da\u0001\u0000\u0000\u0000\u01dc\u01db\u0001"+ - "\u0000\u0000\u0000\u01ddU\u0001\u0000\u0000\u0000\u01de\u01e0\u0007\u0000"+ - "\u0000\u0000\u01df\u01de\u0001\u0000\u0000\u0000\u01df\u01e0\u0001\u0000"+ - "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u001d"+ - "\u0000\u0000\u01e2W\u0001\u0000\u0000\u0000\u01e3\u01e5\u0007\u0000\u0000"+ - "\u0000\u01e4\u01e3\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005\u001c\u0000"+ - "\u0000\u01e7Y\u0001\u0000\u0000\u0000\u01e8\u01e9\u0005\u001b\u0000\u0000"+ - "\u01e9[\u0001\u0000\u0000\u0000\u01ea\u01eb\u0007\u0007\u0000\u0000\u01eb"+ - "]\u0001\u0000\u0000\u0000\u01ec\u01ed\u0005\u0005\u0000\u0000\u01ed\u01ee"+ - "\u0003`0\u0000\u01ee_\u0001\u0000\u0000\u0000\u01ef\u01f0\u0005@\u0000"+ - "\u0000\u01f0\u01f1\u0003\u0002\u0001\u0000\u01f1\u01f2\u0005A\u0000\u0000"+ - "\u01f2a\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000f\u0000\u0000\u01f4"+ - "\u01f5\u0005a\u0000\u0000\u01f5c\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005"+ - "\u000b\u0000\u0000\u01f7\u01f8\u0005e\u0000\u0000\u01f8e\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0005\u0003\u0000\u0000\u01fa\u01fd\u0005W\u0000\u0000"+ - "\u01fb\u01fc\u0005U\u0000\u0000\u01fc\u01fe\u00032\u0019\u0000\u01fd\u01fb"+ - "\u0001\u0000\u0000\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u0208"+ - "\u0001\u0000\u0000\u0000\u01ff\u0200\u0005V\u0000\u0000\u0200\u0205\u0003"+ - "h4\u0000\u0201\u0202\u0005\"\u0000\u0000\u0202\u0204\u0003h4\u0000\u0203"+ - "\u0201\u0001\u0000\u0000\u0000\u0204\u0207\u0001\u0000\u0000\u0000\u0205"+ - "\u0203\u0001\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206"+ - "\u0209\u0001\u0000\u0000\u0000\u0207\u0205\u0001\u0000\u0000\u0000\u0208"+ - "\u01ff\u0001\u0000\u0000\u0000\u0208\u0209\u0001\u0000\u0000\u0000\u0209"+ - "g\u0001\u0000\u0000\u0000\u020a\u020b\u00032\u0019\u0000\u020b\u020c\u0005"+ - "!\u0000\u0000\u020c\u020e\u0001\u0000\u0000\u0000\u020d\u020a\u0001\u0000"+ - "\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000"+ - "\u0000\u0000\u020f\u0210\u00032\u0019\u0000\u0210i\u0001\u0000\u0000\u0000"+ - "3u}\u008c\u0098\u00a1\u00a9\u00ad\u00b5\u00b7\u00bc\u00c3\u00c8\u00cf"+ - "\u00d5\u00dd\u00df\u00e9\u00f3\u00f6\u0102\u010a\u0112\u0116\u0119\u0123"+ - "\u012c\u0134\u0140\u0144\u014a\u0151\u0159\u016f\u017a\u0185\u018a\u0195"+ - "\u019a\u019e\u01a6\u01af\u01b8\u01c3\u01d1\u01dc\u01df\u01e4\u01fd\u0205"+ - "\u0208\u020d"; + 
"\u0005\u0005\u00b8\b\u0005\n\u0005\f\u0005\u00bb\t\u0005\u0001\u0006\u0001"+ + "\u0006\u0003\u0006\u00bf\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0003\u0006\u00c6\b\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0003\u0006\u00cb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0003\u0007\u00d2\b\u0007\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0003\b\u00d8\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005"+ + "\b\u00e0\b\b\n\b\f\b\u00e3\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t\u0001\t\u0005"+ + "\t\u00f2\b\t\n\t\f\t\u00f5\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0005\n\u00fd\b\n\n\n\f\n\u0100\t\n\u0003\n\u0102\b\n\u0001\n"+ + "\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0005\r\u010e\b\r\n\r\f\r\u0111\t\r\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0118\b\u000e\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u011e\b\u000f\n\u000f"+ + "\f\u000f\u0121\t\u000f\u0001\u000f\u0003\u000f\u0124\b\u000f\u0001\u000f"+ + "\u0003\u000f\u0127\b\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0005\u0011\u012f\b\u0011\n\u0011\f\u0011\u0132"+ + "\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0003\u0013\u013a\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0005\u0014\u0140\b\u0014\n\u0014\f\u0014\u0143\t\u0014\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0017\u0001\u0017\u0003\u0017\u014e\b\u0017\u0001\u0017\u0001\u0017"+ + "\u0003\u0017\u0152\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0003\u0018\u0158\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ + "\u015d\b\u0019\n\u0019\f\u0019\u0160\t\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0005\u001a\u0165\b\u001a\n\u001a\f\u001a\u0168\t\u001a\u0001\u001b"+ + "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u017b\b\u001d"+ + "\n\u001d\f\u001d\u017e\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0186\b\u001d\n\u001d\f\u001d"+ + "\u0189\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0005\u001d\u0191\b\u001d\n\u001d\f\u001d\u0194\t\u001d\u0001"+ + "\u001d\u0001\u001d\u0003\u001d\u0198\b\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1"+ + "\b\u001f\n\u001f\f\u001f\u01a4\t\u001f\u0001 \u0001 \u0003 \u01a8\b \u0001"+ + " \u0001 \u0003 \u01ac\b \u0001!\u0001!\u0001!\u0001!\u0005!\u01b2\b!\n"+ + "!\f!\u01b5\t!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f"+ + "\"\u01be\t\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01c4\b#\n#\f#\u01c7\t"+ + "#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0003%\u01d1"+ + "\b%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001("+ + "\u0001(\u0005(\u01dd\b(\n(\f(\u01e0\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001+\u0001+\u0003+\u01ea\b+\u0001,\u0003,\u01ed\b,\u0001,\u0001"+ + ",\u0001-\u0003-\u01f2\b-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001"+ + "0\u00010\u00010\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ + 
"3\u00013\u00013\u00014\u00014\u00014\u00014\u00034\u020b\b4\u00014\u0001"+ + "4\u00014\u00014\u00054\u0211\b4\n4\f4\u0214\t4\u00034\u0216\b4\u00015"+ + "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ + "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ + "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ + "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ + "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ + "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ + "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ + "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ + "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ + "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ + "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ + "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ + "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ + "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ + "\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ + "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ + "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ + "\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ + "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ + ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ + "\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ + "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ + "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ + "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ + "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ + "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ + "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ + "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ + "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ + "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ + "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ + "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ + "\u0006\u0003\u0000ur\u0001\u0000\u0000\u0000vy\u0001\u0000\u0000\u0000"+ + "wu\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000x\u0003\u0001\u0000"+ + "\u0000\u0000yw\u0001\u0000\u0000\u0000z\u0080\u0003`0\u0000{\u0080\u0003"+ + "\u001e\u000f\u0000|\u0080\u0003\u0018\f\u0000}\u0080\u0003d2\u0000~\u0080"+ + "\u0003f3\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ + "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ + "~\u0001\u0000\u0000\u0000\u0080\u0005\u0001\u0000\u0000\u0000\u0081\u008f"+ + "\u0003,\u0016\u0000\u0082\u008f\u00030\u0018\u0000\u0083\u008f\u0003<"+ + "\u001e\u0000\u0084\u008f\u0003B!\u0000\u0085\u008f\u0003>\u001f\u0000"+ + "\u0086\u008f\u0003.\u0017\u0000\u0087\u008f\u0003\b\u0004\u0000\u0088"+ + "\u008f\u0003D\"\u0000\u0089\u008f\u0003F#\u0000\u008a\u008f\u0003J%\u0000"+ + 
"\u008b\u008f\u0003L&\u0000\u008c\u008f\u0003h4\u0000\u008d\u008f\u0003"+ + "N\'\u0000\u008e\u0081\u0001\u0000\u0000\u0000\u008e\u0082\u0001\u0000"+ + "\u0000\u0000\u008e\u0083\u0001\u0000\u0000\u0000\u008e\u0084\u0001\u0000"+ + "\u0000\u0000\u008e\u0085\u0001\u0000\u0000\u0000\u008e\u0086\u0001\u0000"+ + "\u0000\u0000\u008e\u0087\u0001\u0000\u0000\u0000\u008e\u0088\u0001\u0000"+ + "\u0000\u0000\u008e\u0089\u0001\u0000\u0000\u0000\u008e\u008a\u0001\u0000"+ + "\u0000\u0000\u008e\u008b\u0001\u0000\u0000\u0000\u008e\u008c\u0001\u0000"+ + "\u0000\u0000\u008e\u008d\u0001\u0000\u0000\u0000\u008f\u0007\u0001\u0000"+ + "\u0000\u0000\u0090\u0091\u0005\u0012\u0000\u0000\u0091\u0092\u0003\n\u0005"+ + "\u0000\u0092\t\u0001\u0000\u0000\u0000\u0093\u0094\u0006\u0005\uffff\uffff"+ + "\u0000\u0094\u0095\u0005-\u0000\u0000\u0095\u00b0\u0003\n\u0005\u0007"+ + "\u0096\u00b0\u0003\u000e\u0007\u0000\u0097\u00b0\u0003\f\u0006\u0000\u0098"+ + "\u009a\u0003\u000e\u0007\u0000\u0099\u009b\u0005-\u0000\u0000\u009a\u0099"+ + "\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009c"+ + "\u0001\u0000\u0000\u0000\u009c\u009d\u0005*\u0000\u0000\u009d\u009e\u0005"+ + ")\u0000\u0000\u009e\u00a3\u0003\u000e\u0007\u0000\u009f\u00a0\u0005#\u0000"+ + "\u0000\u00a0\u00a2\u0003\u000e\u0007\u0000\u00a1\u009f\u0001\u0000\u0000"+ + "\u0000\u00a2\u00a5\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ + "\u0000\u00a3\u00a4\u0001\u0000\u0000\u0000\u00a4\u00a6\u0001\u0000\u0000"+ + "\u0000\u00a5\u00a3\u0001\u0000\u0000\u0000\u00a6\u00a7\u00053\u0000\u0000"+ + "\u00a7\u00b0\u0001\u0000\u0000\u0000\u00a8\u00a9\u0003\u000e\u0007\u0000"+ + "\u00a9\u00ab\u0005+\u0000\u0000\u00aa\u00ac\u0005-\u0000\u0000\u00ab\u00aa"+ + "\u0001\u0000\u0000\u0000\u00ab\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad"+ + "\u0001\u0000\u0000\u0000\u00ad\u00ae\u0005.\u0000\u0000\u00ae\u00b0\u0001"+ + "\u0000\u0000\u0000\u00af\u0093\u0001\u0000\u0000\u0000\u00af\u0096\u0001"+ + "\u0000\u0000\u0000\u00af\u0097\u0001\u0000\u0000\u0000\u00af\u0098\u0001"+ + "\u0000\u0000\u0000\u00af\u00a8\u0001\u0000\u0000\u0000\u00b0\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b1\u00b2\n\u0004\u0000\u0000\u00b2\u00b3\u0005\u001f"+ + "\u0000\u0000\u00b3\u00b8\u0003\n\u0005\u0005\u00b4\u00b5\n\u0003\u0000"+ + "\u0000\u00b5\u00b6\u00050\u0000\u0000\u00b6\u00b8\u0003\n\u0005\u0004"+ + "\u00b7\u00b1\u0001\u0000\u0000\u0000\u00b7\u00b4\u0001\u0000\u0000\u0000"+ + "\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ + "\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u000b\u0001\u0000\u0000\u0000"+ + "\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00be\u0003\u000e\u0007\u0000"+ + "\u00bd\u00bf\u0005-\u0000\u0000\u00be\u00bd\u0001\u0000\u0000\u0000\u00be"+ + "\u00bf\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0"+ + "\u00c1\u0005,\u0000\u0000\u00c1\u00c2\u0003\\.\u0000\u00c2\u00cb\u0001"+ + "\u0000\u0000\u0000\u00c3\u00c5\u0003\u000e\u0007\u0000\u00c4\u00c6\u0005"+ + "-\u0000\u0000\u00c5\u00c4\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c7\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000"+ + "\u0000\u00c8\u00c9\u0003\\.\u0000\u00c9\u00cb\u0001\u0000\u0000\u0000"+ + "\u00ca\u00bc\u0001\u0000\u0000\u0000\u00ca\u00c3\u0001\u0000\u0000\u0000"+ + "\u00cb\r\u0001\u0000\u0000\u0000\u00cc\u00d2\u0003\u0010\b\u0000\u00cd"+ + "\u00ce\u0003\u0010\b\u0000\u00ce\u00cf\u0003^/\u0000\u00cf\u00d0\u0003"+ + "\u0010\b\u0000\u00d0\u00d2\u0001\u0000\u0000\u0000\u00d1\u00cc\u0001\u0000"+ + 
"\u0000\u0000\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d2\u000f\u0001\u0000"+ + "\u0000\u0000\u00d3\u00d4\u0006\b\uffff\uffff\u0000\u00d4\u00d8\u0003\u0012"+ + "\t\u0000\u00d5\u00d6\u0007\u0000\u0000\u0000\u00d6\u00d8\u0003\u0010\b"+ + "\u0003\u00d7\u00d3\u0001\u0000\u0000\u0000\u00d7\u00d5\u0001\u0000\u0000"+ + "\u0000\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00da\n\u0002\u0000\u0000"+ + "\u00da\u00db\u0007\u0001\u0000\u0000\u00db\u00e0\u0003\u0010\b\u0003\u00dc"+ + "\u00dd\n\u0001\u0000\u0000\u00dd\u00de\u0007\u0000\u0000\u0000\u00de\u00e0"+ + "\u0003\u0010\b\u0002\u00df\u00d9\u0001\u0000\u0000\u0000\u00df\u00dc\u0001"+ + "\u0000\u0000\u0000\u00e0\u00e3\u0001\u0000\u0000\u0000\u00e1\u00df\u0001"+ + "\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u0011\u0001"+ + "\u0000\u0000\u0000\u00e3\u00e1\u0001\u0000\u0000\u0000\u00e4\u00e5\u0006"+ + "\t\uffff\uffff\u0000\u00e5\u00ed\u0003:\u001d\u0000\u00e6\u00ed\u0003"+ + "2\u0019\u0000\u00e7\u00ed\u0003\u0014\n\u0000\u00e8\u00e9\u0005)\u0000"+ + "\u0000\u00e9\u00ea\u0003\n\u0005\u0000\u00ea\u00eb\u00053\u0000\u0000"+ + "\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00e4\u0001\u0000\u0000\u0000"+ + "\u00ec\u00e6\u0001\u0000\u0000\u0000\u00ec\u00e7\u0001\u0000\u0000\u0000"+ + "\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ed\u00f3\u0001\u0000\u0000\u0000"+ + "\u00ee\u00ef\n\u0001\u0000\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0"+ + "\u00f2\u0003\u0016\u000b\u0000\u00f1\u00ee\u0001\u0000\u0000\u0000\u00f2"+ + "\u00f5\u0001\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3"+ + "\u00f4\u0001\u0000\u0000\u0000\u00f4\u0013\u0001\u0000\u0000\u0000\u00f5"+ + "\u00f3\u0001\u0000\u0000\u0000\u00f6\u00f7\u00036\u001b\u0000\u00f7\u0101"+ + "\u0005)\u0000\u0000\u00f8\u0102\u0005>\u0000\u0000\u00f9\u00fe\u0003\n"+ + "\u0005\u0000\u00fa\u00fb\u0005#\u0000\u0000\u00fb\u00fd\u0003\n\u0005"+ + "\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000"+ + "\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000"+ + "\u0000\u00ff\u0102\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000"+ + "\u0000\u0101\u00f8\u0001\u0000\u0000\u0000\u0101\u00f9\u0001\u0000\u0000"+ + "\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000"+ + "\u0000\u0103\u0104\u00053\u0000\u0000\u0104\u0015\u0001\u0000\u0000\u0000"+ + "\u0105\u0106\u00036\u001b\u0000\u0106\u0017\u0001\u0000\u0000\u0000\u0107"+ + "\u0108\u0005\u000e\u0000\u0000\u0108\u0109\u0003\u001a\r\u0000\u0109\u0019"+ + "\u0001\u0000\u0000\u0000\u010a\u010f\u0003\u001c\u000e\u0000\u010b\u010c"+ + "\u0005#\u0000\u0000\u010c\u010e\u0003\u001c\u000e\u0000\u010d\u010b\u0001"+ + "\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000\u010f\u010d\u0001"+ + "\u0000\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001b\u0001"+ + "\u0000\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0118\u0003"+ + "\n\u0005\u0000\u0113\u0114\u00032\u0019\u0000\u0114\u0115\u0005!\u0000"+ + "\u0000\u0115\u0116\u0003\n\u0005\u0000\u0116\u0118\u0001\u0000\u0000\u0000"+ + "\u0117\u0112\u0001\u0000\u0000\u0000\u0117\u0113\u0001\u0000\u0000\u0000"+ + "\u0118\u001d\u0001\u0000\u0000\u0000\u0119\u011a\u0005\u0006\u0000\u0000"+ + "\u011a\u011f\u0003 \u0010\u0000\u011b\u011c\u0005#\u0000\u0000\u011c\u011e"+ + "\u0003 \u0010\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011e\u0121\u0001"+ + "\u0000\u0000\u0000\u011f\u011d\u0001\u0000\u0000\u0000\u011f\u0120\u0001"+ + "\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121\u011f\u0001"+ + 
"\u0000\u0000\u0000\u0122\u0124\u0003&\u0013\u0000\u0123\u0122\u0001\u0000"+ + "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ + "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ + "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ + "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ + "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ + "\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ + "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ + "\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ + "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ + "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ + "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ + "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ + "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ + "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ + "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ + "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ + "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ + "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ + "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ + "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ + "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ + "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ + "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ + "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ + "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ + "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ + "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ + "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ + "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ + "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ + "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ + "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ + "\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ + "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ + "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ + "\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ + "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ + "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ + "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ + "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ + "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ + "\u0176\u0177\u0005A\u0000\u0000\u0177\u017c\u0003V+\u0000\u0178\u0179"+ + "\u0005#\u0000\u0000\u0179\u017b\u0003V+\u0000\u017a\u0178\u0001\u0000"+ + 
"\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000"+ + "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u017f\u0001\u0000"+ + "\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0180\u0005B\u0000"+ + "\u0000\u0180\u0198\u0001\u0000\u0000\u0000\u0181\u0182\u0005A\u0000\u0000"+ + "\u0182\u0187\u0003T*\u0000\u0183\u0184\u0005#\u0000\u0000\u0184\u0186"+ + "\u0003T*\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000"+ + "\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001\u0000"+ + "\u0000\u0000\u0188\u018a\u0001\u0000\u0000\u0000\u0189\u0187\u0001\u0000"+ + "\u0000\u0000\u018a\u018b\u0005B\u0000\u0000\u018b\u0198\u0001\u0000\u0000"+ + "\u0000\u018c\u018d\u0005A\u0000\u0000\u018d\u0192\u0003\\.\u0000\u018e"+ + "\u018f\u0005#\u0000\u0000\u018f\u0191\u0003\\.\u0000\u0190\u018e\u0001"+ + "\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001"+ + "\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193\u0195\u0001"+ + "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005"+ + "B\u0000\u0000\u0196\u0198\u0001\u0000\u0000\u0000\u0197\u016d\u0001\u0000"+ + "\u0000\u0000\u0197\u016e\u0001\u0000\u0000\u0000\u0197\u0171\u0001\u0000"+ + "\u0000\u0000\u0197\u0172\u0001\u0000\u0000\u0000\u0197\u0173\u0001\u0000"+ + "\u0000\u0000\u0197\u0174\u0001\u0000\u0000\u0000\u0197\u0175\u0001\u0000"+ + "\u0000\u0000\u0197\u0176\u0001\u0000\u0000\u0000\u0197\u0181\u0001\u0000"+ + "\u0000\u0000\u0197\u018c\u0001\u0000\u0000\u0000\u0198;\u0001\u0000\u0000"+ + "\u0000\u0199\u019a\u0005\n\u0000\u0000\u019a\u019b\u0005\u001c\u0000\u0000"+ + "\u019b=\u0001\u0000\u0000\u0000\u019c\u019d\u0005\u0010\u0000\u0000\u019d"+ + "\u01a2\u0003@ \u0000\u019e\u019f\u0005#\u0000\u0000\u019f\u01a1\u0003"+ + "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ + "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ + "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ + "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ + "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ + "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ + "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ + "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ + "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ + "\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000"+ + "\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4C\u0001\u0000\u0000\u0000\u01b5"+ + "\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u0002\u0000\u0000\u01b7"+ + "\u01bc\u00034\u001a\u0000\u01b8\u01b9\u0005#\u0000\u0000\u01b9\u01bb\u0003"+ + "4\u001a\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001\u0000"+ + "\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001\u0000"+ + "\u0000\u0000\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000"+ + "\u0000\u01bf\u01c0\u0005\r\u0000\u0000\u01c0\u01c5\u0003H$\u0000\u01c1"+ + "\u01c2\u0005#\u0000\u0000\u01c2\u01c4\u0003H$\u0000\u01c3\u01c1\u0001"+ + "\u0000\u0000\u0000\u01c4\u01c7\u0001\u0000\u0000\u0000\u01c5\u01c3\u0001"+ + "\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6G\u0001\u0000"+ + "\u0000\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c8\u01c9\u00034\u001a"+ + 
"\u0000\u01c9\u01ca\u0005R\u0000\u0000\u01ca\u01cb\u00034\u001a\u0000\u01cb"+ + "I\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\u0001\u0000\u0000\u01cd\u01ce"+ + "\u0003\u0012\t\u0000\u01ce\u01d0\u0003\\.\u0000\u01cf\u01d1\u0003P(\u0000"+ + "\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000"+ + "\u01d1K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u0007\u0000\u0000\u01d3"+ + "\u01d4\u0003\u0012\t\u0000\u01d4\u01d5\u0003\\.\u0000\u01d5M\u0001\u0000"+ + "\u0000\u0000\u01d6\u01d7\u0005\f\u0000\u0000\u01d7\u01d8\u00032\u0019"+ + "\u0000\u01d8O\u0001\u0000\u0000\u0000\u01d9\u01de\u0003R)\u0000\u01da"+ + "\u01db\u0005#\u0000\u0000\u01db\u01dd\u0003R)\u0000\u01dc\u01da\u0001"+ + "\u0000\u0000\u0000\u01dd\u01e0\u0001\u0000\u0000\u0000\u01de\u01dc\u0001"+ + "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ + "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ + "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ + "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ + "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ + "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ + "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ + "\u01eb\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed"+ + "\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u001d\u0000\u0000\u01ef"+ + "Y\u0001\u0000\u0000\u0000\u01f0\u01f2\u0007\u0000\u0000\u0000\u01f1\u01f0"+ + "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ + "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ + "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ + "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ + "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ + "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ + "\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ + "\u0000\u0200\u0201\u0005\u000f\u0000\u0000\u0201\u0202\u0005b\u0000\u0000"+ + "\u0202e\u0001\u0000\u0000\u0000\u0203\u0204\u0005\u000b\u0000\u0000\u0204"+ + "\u0205\u0005f\u0000\u0000\u0205g\u0001\u0000\u0000\u0000\u0206\u0207\u0005"+ + "\u0003\u0000\u0000\u0207\u020a\u0005X\u0000\u0000\u0208\u0209\u0005V\u0000"+ + "\u0000\u0209\u020b\u00034\u001a\u0000\u020a\u0208\u0001\u0000\u0000\u0000"+ + "\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u0215\u0001\u0000\u0000\u0000"+ + "\u020c\u020d\u0005W\u0000\u0000\u020d\u0212\u0003j5\u0000\u020e\u020f"+ + "\u0005#\u0000\u0000\u020f\u0211\u0003j5\u0000\u0210\u020e\u0001\u0000"+ + "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ + "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000"+ + "\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u020c\u0001\u0000"+ + "\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216i\u0001\u0000\u0000"+ + "\u0000\u0217\u0218\u00034\u001a\u0000\u0218\u0219\u0005!\u0000\u0000\u0219"+ + "\u021b\u0001\u0000\u0000\u0000\u021a\u0217\u0001\u0000\u0000\u0000\u021a"+ + "\u021b\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ + "\u021d\u00034\u001a\u0000\u021dk\u0001\u0000\u0000\u00004w\u007f\u008e"+ + "\u009a\u00a3\u00ab\u00af\u00b7\u00b9\u00be\u00c5\u00ca\u00d1\u00d7\u00df"+ + "\u00e1\u00ec\u00f3\u00fe\u0101\u010f\u0117\u011f\u0123\u0126\u0130\u0139"+ + 
"\u0141\u014d\u0151\u0157\u015e\u0166\u017c\u0187\u0192\u0197\u01a2\u01a7"+ + "\u01ab\u01b3\u01bc\u01c5\u01d0\u01de\u01e9\u01ec\u01f1\u020a\u0212\u0215"+ + "\u021a"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 2d5954517d717..5122eb07371b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -233,37 +233,37 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *
<p>The default implementation does nothing.</p>
    */ - @Override public void enterConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { } + @Override public void enterDereference(EsqlBaseParser.DereferenceContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void exitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { } + @Override public void exitDereference(EsqlBaseParser.DereferenceContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void enterDereference(EsqlBaseParser.DereferenceContext ctx) { } + @Override public void enterInlineCast(EsqlBaseParser.InlineCastContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void exitDereference(EsqlBaseParser.DereferenceContext ctx) { } + @Override public void exitInlineCast(EsqlBaseParser.InlineCastContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void enterFunction(EsqlBaseParser.FunctionContext ctx) { } + @Override public void enterConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void exitFunction(EsqlBaseParser.FunctionContext ctx) { } + @Override public void exitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { } /** * {@inheritDoc} * @@ -276,6 +276,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
    */ @Override public void exitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterFunction(EsqlBaseParser.FunctionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitFunction(EsqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * @@ -288,6 +300,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *
<p>The default implementation does nothing.</p>
    */ @Override public void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterToDataType(EsqlBaseParser.ToDataTypeContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitToDataType(EsqlBaseParser.ToDataTypeContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 9bbc672e4d51f..a32ac9bd9100c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -144,21 +144,21 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); } + @Override public T visitDereference(EsqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitDereference(EsqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); } + @Override public T visitInlineCast(EsqlBaseParser.InlineCastContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitFunction(EsqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } + @Override public T visitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -166,6 +166,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
    */ @Override public T visitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitFunction(EsqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -173,6 +180,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
</p>
    */ @Override public T visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitToDataType(EsqlBaseParser.ToDataTypeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index c80b7e5dd878e..6e8000f7fcf8e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -214,41 +214,41 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitArithmeticUnary(EsqlBaseParser.ArithmeticUnaryContext ctx); /** - * Enter a parse tree produced by the {@code constantDefault} + * Enter a parse tree produced by the {@code dereference} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); + void enterDereference(EsqlBaseParser.DereferenceContext ctx); /** - * Exit a parse tree produced by the {@code constantDefault} + * Exit a parse tree produced by the {@code dereference} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); + void exitDereference(EsqlBaseParser.DereferenceContext ctx); /** - * Enter a parse tree produced by the {@code dereference} + * Enter a parse tree produced by the {@code inlineCast} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterDereference(EsqlBaseParser.DereferenceContext ctx); + void enterInlineCast(EsqlBaseParser.InlineCastContext ctx); /** - * Exit a parse tree produced by the {@code dereference} + * Exit a parse tree produced by the {@code inlineCast} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitDereference(EsqlBaseParser.DereferenceContext ctx); + void exitInlineCast(EsqlBaseParser.InlineCastContext ctx); /** - * Enter a parse tree produced by the {@code function} + * Enter a parse tree produced by the {@code constantDefault} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterFunction(EsqlBaseParser.FunctionContext ctx); + void enterConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); /** - * Exit a parse tree produced by the {@code function} + * Exit a parse tree produced by the {@code constantDefault} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void exitFunction(EsqlBaseParser.FunctionContext ctx); + void exitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); /** * Enter a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. @@ -261,6 +261,18 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx); + /** + * Enter a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * @param ctx the parse tree + */ + void enterFunction(EsqlBaseParser.FunctionContext ctx); + /** + * Exit a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. 
+ * @param ctx the parse tree + */ + void exitFunction(EsqlBaseParser.FunctionContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#functionExpression}. * @param ctx the parse tree @@ -271,6 +283,18 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); + /** + * Enter a parse tree produced by the {@code toDataType} + * labeled alternative in {@link EsqlBaseParser#dataType}. + * @param ctx the parse tree + */ + void enterToDataType(EsqlBaseParser.ToDataTypeContext ctx); + /** + * Exit a parse tree produced by the {@code toDataType} + * labeled alternative in {@link EsqlBaseParser#dataType}. + * @param ctx the parse tree + */ + void exitToDataType(EsqlBaseParser.ToDataTypeContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#rowCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 09da2cb9c3ddb..d6e83b37a0f39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -132,26 +132,26 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitArithmeticUnary(EsqlBaseParser.ArithmeticUnaryContext ctx); /** - * Visit a parse tree produced by the {@code constantDefault} + * Visit a parse tree produced by the {@code dereference} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); + T visitDereference(EsqlBaseParser.DereferenceContext ctx); /** - * Visit a parse tree produced by the {@code dereference} + * Visit a parse tree produced by the {@code inlineCast} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitDereference(EsqlBaseParser.DereferenceContext ctx); + T visitInlineCast(EsqlBaseParser.InlineCastContext ctx); /** - * Visit a parse tree produced by the {@code function} + * Visit a parse tree produced by the {@code constantDefault} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. * @param ctx the parse tree * @return the visitor result */ - T visitFunction(EsqlBaseParser.FunctionContext ctx); + T visitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx); /** * Visit a parse tree produced by the {@code parenthesizedExpression} * labeled alternative in {@link EsqlBaseParser#primaryExpression}. @@ -159,12 +159,26 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx); + /** + * Visit a parse tree produced by the {@code function} + * labeled alternative in {@link EsqlBaseParser#primaryExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunction(EsqlBaseParser.FunctionContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#functionExpression}. 
* @param ctx the parse tree * @return the visitor result */ T visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx); + /** + * Visit a parse tree produced by the {@code toDataType} + * labeled alternative in {@link EsqlBaseParser#dataType}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitToDataType(EsqlBaseParser.ToDataTypeContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#rowCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 223d318a64324..087ead8539d00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -486,6 +486,28 @@ public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionConte return new UnresolvedFunction(source(ctx), name, FunctionResolutionStrategy.DEFAULT, args); } + @Override + public Expression visitInlineCast(EsqlBaseParser.InlineCastContext ctx) { + Source source = source(ctx); + DataType dataType = typedParsing(this, ctx.dataType(), DataType.class); + var converterToFactory = EsqlDataTypeConverter.converterFunctionFactory(dataType); + if (converterToFactory == null) { + throw new ParsingException(source, "Unsupported conversion to type [{}]", dataType); + } + Expression expr = expression(ctx.primaryExpression()); + return converterToFactory.apply(source, expr); + } + + @Override + public DataType visitToDataType(EsqlBaseParser.ToDataTypeContext ctx) { + String typeName = visitIdentifier(ctx.identifier()); + DataType dataType = EsqlDataTypes.fromNameOrAlias(typeName); + if (dataType == DataTypes.UNSUPPORTED) { + throw new ParsingException(source(ctx), "Unknown data type named [{}]", typeName); + } + return dataType; + } + @Override public Expression visitLogicalBinary(EsqlBaseParser.LogicalBinaryContext ctx) { int type = ctx.operator.getType(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 28fa34fa2338d..ff157c0fe3e0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -110,6 +110,11 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature BASE64_DECODE_ENCODE = new NodeFeature("esql.base64_decode_encode"); + /** + * Support for the :: casting operator + */ + public static final NodeFeature CASTING_OPERATOR = new NodeFeature("esql.casting_operator"); + @Override public Set getFeatures() { return Set.of( @@ -126,7 +131,8 @@ public Set getFeatures() { ST_INTERSECTS, ST_CONTAINS_WITHIN, ST_DISJOINT, - STRING_LITERAL_AUTO_CASTING + STRING_LITERAL_AUTO_CASTING, + CASTING_OPERATOR ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 386dbd50dc9ba..7c0441443bf22 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -13,9 
+13,24 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.Converter; import org.elasticsearch.xpack.ql.type.DataType; @@ -34,13 +49,30 @@ import java.time.temporal.ChronoField; import java.time.temporal.TemporalAmount; import java.util.Locale; +import java.util.Map; +import java.util.function.BiFunction; import java.util.function.Function; +import static java.util.Map.entry; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; +import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; +import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; +import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.ql.type.DataTypes.IP; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.ql.type.DataTypes.isPrimitive; import static org.elasticsearch.xpack.ql.type.DataTypes.isString; import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; 
@@ -56,6 +88,25 @@ public class EsqlDataTypeConverter { public static final DateFormatter HOUR_MINUTE_SECOND = DateFormatter.forPattern("strict_hour_minute_second_fraction"); + private static final Map> TYPE_TO_CONVERTER_FUNCTION = Map.ofEntries( + entry(BOOLEAN, ToBoolean::new), + entry(CARTESIAN_POINT, ToCartesianPoint::new), + entry(CARTESIAN_SHAPE, ToCartesianShape::new), + entry(DATETIME, ToDatetime::new), + // ToDegrees, typeless + entry(DOUBLE, ToDouble::new), + entry(GEO_POINT, ToGeoPoint::new), + entry(GEO_SHAPE, ToGeoShape::new), + entry(INTEGER, ToInteger::new), + entry(IP, ToIP::new), + entry(LONG, ToLong::new), + // ToRadians, typeless + entry(KEYWORD, ToString::new), + entry(TEXT, ToString::new), + entry(UNSIGNED_LONG, ToUnsignedLong::new), + entry(VERSION, ToVersion::new) + ); + /** * Returns true if the from type can be converted to the to type, false - otherwise */ @@ -419,4 +470,8 @@ public Object convert(Object l) { return converter.apply(l); } } + + public static BiFunction converterFunctionFactory(DataType toType) { + return TYPE_TO_CONVERTER_FUNCTION.get(toType); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 8edee89832255..468ffcc2cba2a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -15,6 +15,7 @@ import java.util.Comparator; import java.util.Locale; import java.util.Map; +import java.util.function.Function; import java.util.stream.Stream; import static java.util.stream.Collectors.toMap; @@ -91,6 +92,15 @@ public final class EsqlDataTypes { ES_TO_TYPE = Collections.unmodifiableMap(map); } + private static final Map NAME_OR_ALIAS_TO_TYPE; + static { + Map map = TYPES.stream().collect(toMap(DataType::typeName, Function.identity())); + map.put("bool", BOOLEAN); + map.put("int", INTEGER); + map.put("string", KEYWORD); + NAME_OR_ALIAS_TO_TYPE = Collections.unmodifiableMap(map); + } + private EsqlDataTypes() {} public static Collection types() { @@ -106,6 +116,11 @@ public static DataType fromName(String name) { return type != null ? type : UNSUPPORTED; } + public static DataType fromNameOrAlias(String typeName) { + DataType type = NAME_OR_ALIAS_TO_TYPE.get(typeName.toLowerCase(Locale.ROOT)); + return type != null ? 
type : UNSUPPORTED; + } + public static DataType fromJava(Object value) { if (value == null) { return NULL; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 651ad2c548e55..8d9140cdda5f4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -476,6 +476,10 @@ public void testSpatialSort() { assertEquals("1:42: cannot sort on cartesian_shape", error("FROM countries_bbox_web | LIMIT 5 | sort shape", countriesBboxWeb)); } + public void testInlineImpossibleConvert() { + assertEquals("1:5: argument of [false::ip] must be [ip or string], found value [false] type [boolean]", error("ROW false::ip")); + } + private String error(String query) { return error(query, defaultAnalyzer); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 49fb3af5384b4..8901f94cd2cf6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -1027,6 +1027,18 @@ public void testEnrichOnMatchField() { assertThat(ua.name(), is("`name`* = language_name")); } + public void testInlineConvertWithNonexistentType() { + expectError("ROW 1::doesnotexist", "line 1:9: Unknown data type named [doesnotexist]"); + expectError("ROW \"1\"::doesnotexist", "line 1:11: Unknown data type named [doesnotexist]"); + expectError("ROW false::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW abs(1)::doesnotexist", "line 1:14: Unknown data type named [doesnotexist]"); + expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + } + + public void testInlineConvertUnsupportedType() { + expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); + } + private LogicalPlan statement(String e) { return statement(e, List.of()); } From 87928b2303ead69e10902121324b4d671f8f8581 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 18 Apr 2024 09:09:44 -0600 Subject: [PATCH 102/130] GA data stream lifecycles in documentation (#107582) This commit removes the warning about technical preview for the data stream lifecycle documentation pages.
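As background for the pages touched below: a data stream lifecycle is configured per data stream through the _lifecycle endpoint that these docs describe. A minimal sketch using the low-level Java REST client; the helper name, stream name, and retention value are illustrative assumptions, not taken from this patch:

import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class LifecycleSketch {
    // Hypothetical helper, not part of this patch: applies a retention-only
    // lifecycle to an existing data stream via the put-lifecycle endpoint,
    // e.g. putRetention(client, "my-data-stream", "7d").
    static void putRetention(RestClient client, String dataStream, String retention) throws Exception {
        Request put = new Request("PUT", "/_data_stream/" + dataStream + "/_lifecycle");
        put.setJsonEntity("{ \"data_retention\": \"" + retention + "\" }");
        client.performRequest(put);
    }
}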
--- .../data-streams/lifecycle/apis/delete-lifecycle.asciidoc | 2 -- .../data-streams/lifecycle/apis/explain-lifecycle.asciidoc | 2 -- .../data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc | 4 +--- .../data-streams/lifecycle/apis/get-lifecycle.asciidoc | 2 -- .../data-streams/lifecycle/apis/put-lifecycle.asciidoc | 2 -- docs/reference/data-streams/lifecycle/index.asciidoc | 2 -- .../lifecycle/tutorial-manage-existing-data-stream.asciidoc | 2 -- .../lifecycle/tutorial-manage-new-data-stream.asciidoc | 2 -- .../tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc | 4 +--- 9 files changed, 2 insertions(+), 20 deletions(-) diff --git a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc index 0cf6ad395fb4d..f20c949c2fbc8 100644 --- a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc @@ -4,8 +4,6 @@ Delete Data Stream Lifecycle ++++ -preview::[] - Deletes the lifecycle from a set of data streams. [[delete-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc index e0e2df217335f..b739751ca5b02 100644 --- a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc @@ -4,8 +4,6 @@ Explain Data Stream Lifecycle ++++ -preview::[] - Retrieves the current data stream lifecycle status for one or more data stream backing indices. [[explain-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc index 6fa82dc2a810c..a99fa19d9db8d 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle-stats.asciidoc @@ -4,8 +4,6 @@ Get Data Stream Lifecycle ++++ -preview::[] - Gets stats about the execution of data stream lifecycle. [[get-lifecycle-stats-api-prereqs]] @@ -90,4 +88,4 @@ The response will look like the following: } ] } --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index 0d80a31bd4f5a..331285af395b6 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -4,8 +4,6 @@ Get Data Stream Lifecycle ++++ -preview::[] - Gets the lifecycle of a set of data streams. [[get-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc index e68dc24f11a57..6bd157071f54e 100644 --- a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc @@ -4,8 +4,6 @@ Put Data Stream Lifecycle ++++ -preview::[] - Configures the data stream lifecycle for the targeted data streams. 
[[put-lifecycle-api-prereqs]] diff --git a/docs/reference/data-streams/lifecycle/index.asciidoc b/docs/reference/data-streams/lifecycle/index.asciidoc index bf861df7c80d4..16ccf2ef82391 100644 --- a/docs/reference/data-streams/lifecycle/index.asciidoc +++ b/docs/reference/data-streams/lifecycle/index.asciidoc @@ -2,8 +2,6 @@ [[data-stream-lifecycle]] == Data stream lifecycle -preview::[] - A data stream lifecycle is the built-in mechanism data streams use to manage their lifecycle. It enables you to easily automate the management of your data streams according to your retention requirements. For example, you could configure the lifecycle to: diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc index 5670faaade3ce..56c26d42d3ffb 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc @@ -2,8 +2,6 @@ [[tutorial-manage-existing-data-stream]] === Tutorial: Update existing data stream -preview::[] - To update the lifecycle of an existing data stream you do the following actions: . <> diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc index 6f1d81ab6ead2..c34340a096046 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc @@ -2,8 +2,6 @@ [[tutorial-manage-new-data-stream]] === Tutorial: Create a data stream with a lifecycle -preview::[] - To create a data stream with a built-in lifecycle, follow these steps: . <> diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 3125c82120d8d..6bfa9ad9b00c5 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -2,9 +2,7 @@ [[tutorial-migrate-data-stream-from-ilm-to-dsl]] === Tutorial: Migrate ILM managed data stream to data stream lifecycle -preview::[] - -In this tutorial we'll look at migrating an existing data stream from Index Lifecycle Management ({ilm-init}) to +In this tutorial we'll look at migrating an existing data stream from Index Lifecycle Management ({ilm-init}) to data stream lifecycle. The existing {ilm-init} managed backing indices will continue to be managed by {ilm-init} until they age out and get deleted by {ilm-init}; however, the new backing indices will be managed by data stream lifecycle. From 4ff03a2c4b5f755d8c7c523f39907863cde55489 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 18 Apr 2024 17:18:34 +0200 Subject: [PATCH 103/130] ES|QL: make RestEsqlTestCase.testWarningHeadersOnFailedConversions deterministic (#107616) The number of warnings emitted depends on the number of nodes where the query is executed. 
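Put differently, only a lower bound on the warning count is stable across cluster layouts, which is what the change below switches the test to assert. A minimal sketch of the assertion idiom, with an assumed per-node cap; the helper is hypothetical and not part of this patch:

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;

import java.util.List;

public class WarningCountSketch {
    // Hypothetical helper: each node running part of the query may record up to
    // perNodeCap failures, so K participating nodes can emit anywhere between
    // perNodeCap and K * perNodeCap warnings; only the floor is deterministic.
    static void assertAtLeastFloor(List<String> warnings, int perNodeCap) {
        assertThat(warnings.size(), greaterThanOrEqualTo(perNodeCap));
    }
}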
--- .../elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 76fbbcfb71d79..0f40c2d0a35eb 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -63,6 +63,7 @@ import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -496,13 +497,13 @@ public void testWarningHeadersOnFailedConversions() throws IOException { Response response = client().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); - int expectedWarnings = Math.min(count / 2, 20); + int minExpectedWarnings = Math.min(count / 2, 20); var warnings = response.getWarnings(); - assertThat(warnings.size(), is(1 + expectedWarnings)); + assertThat(warnings.size(), is(greaterThanOrEqualTo(1 + minExpectedWarnings))); // in multi-node there could be more var firstHeader = "Line 1:55: evaluation of [to_int(case(integer %25 2 == 0, to_str(integer), keyword))] failed, " + "treating result as null. Only first 20 failures recorded."; assertThat(warnings.get(0), containsString(firstHeader)); - for (int i = 1; i <= expectedWarnings; i++) { + for (int i = 1; i < warnings.size(); i++) { assertThat( warnings.get(i), containsString("org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [keyword") From b44ce50f64435d0738426551b42c950fbd59ed12 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 18 Apr 2024 11:44:21 -0400 Subject: [PATCH 104/130] [ESQL] Comparison testing followup (#107583) --- .../comparison/EsqlBinaryComparison.java | 2 +- .../function/AbstractFunctionTestCase.java | 15 ++ .../AbstractBinaryComparisonTestCase.java | 130 ------------------ .../comparison/GreaterThanOrEqualTests.java | 119 ++++++++++++++-- .../operator/comparison/GreaterThanTests.java | 119 ++++++++++++++-- .../comparison/LessThanOrEqualTests.java | 118 ++++++++++++++-- .../operator/comparison/LessThanTests.java | 118 ++++++++++++++-- 7 files changed, 436 insertions(+), 185 deletions(-) delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java index 58a808893c4c6..cb3da49038a7d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EsqlBinaryComparison.java @@ -103,7 +103,7 @@ protected TypeResolution resolveInputType(Expression e, TypeResolutions.ParamOrd evaluatorMap::containsKey, sourceText(), paramOrdinal, - 
evaluatorMap.keySet().stream().map(DataType::typeName).toArray(String[]::new) + evaluatorMap.keySet().stream().map(DataType::typeName).sorted().toArray(String[]::new) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index dd7bb6bec0cbe..366c9805bf21c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -962,6 +962,21 @@ protected static String typeErrorMessage(boolean includeOrdinal, List resultMatcher(List data, DataType dataType) { - Comparable lhs = (Comparable) data.get(0); - Comparable rhs = (Comparable) data.get(1); - if (lhs instanceof Double || rhs instanceof Double) { - return (Matcher) (Matcher) resultMatcher(((Number) lhs).doubleValue(), ((Number) rhs).doubleValue()); - } - if (lhs instanceof Long || rhs instanceof Long) { - return (Matcher) (Matcher) resultMatcher(((Number) lhs).longValue(), ((Number) rhs).longValue()); - } - if (lhs instanceof Integer || rhs instanceof Integer) { - return (Matcher) (Matcher) resultMatcher(((Number) lhs).intValue(), ((Number) rhs).intValue()); - } - return (Matcher) (Matcher) resultMatcher(lhs, rhs); - } - - @Override - protected Matcher resultsMatcher(List typedData) { - Number lhs = (Number) typedData.get(0).data(); - Number rhs = (Number) typedData.get(1).data(); - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.DOUBLE))) { - return equalTo(resultMatcher(lhs.doubleValue(), rhs.doubleValue())); - } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.UNSIGNED_LONG))) { - // TODO: Is this correct behavior for unsigned long? 
- return resultMatcher(lhs.longValue(), rhs.longValue()); - } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.LONG))) { - return resultMatcher(lhs.longValue(), rhs.longValue()); - } - if (typedData.stream().anyMatch(t -> t.type().equals(DataTypes.INTEGER))) { - return resultMatcher(lhs.intValue(), rhs.intValue()); - } - throw new UnsupportedOperationException(); - } - - protected abstract > Matcher resultMatcher(T lhs, T rhs); - - protected abstract boolean isEquality(); - - @Override - protected final boolean supportsType(DataType type) { - // Boolean and Spatial types do not support inequality operators - if (type == DataTypes.BOOLEAN || isSpatial(type)) { - return isEquality(); - } - return EsqlDataTypes.isRepresentable(type); - } - - @Override - protected boolean supportsTypes(DataType lhsType, DataType rhsType) { - return super.supportsTypes(lhsType, rhsType) && (lhsType == rhsType || lhsType.isNumeric() && rhsType.isNumeric()); - } - - @Override - protected final void validateType(BinaryOperator op, DataType lhsType, DataType rhsType) { - assertTrue(op.typeResolved().resolved()); - assertThat(op.dataType(), equalTo(DataTypes.BOOLEAN)); - Failure f = Verifier.validateBinaryComparison((BinaryComparison) op); - if (isEquality() == false && lhsType == DataTypes.BOOLEAN) { - assertThat(op.toString(), f, not(nullValue())); - assertThat( - op.toString(), - f.message(), - equalTo( - String.format( - Locale.ROOT, - "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime, version, geo_point, " - + "geo_shape, cartesian_point or cartesian_shape], found value [] type [%s]", - lhsType.typeName(), - rhsType.typeName(), - lhsType.typeName() - ) - ) - ); - return; - } - if (lhsType == rhsType - || lhsType.isNumeric() && rhsType.isNumeric() - || DataTypes.isString(lhsType) && DataTypes.isString(rhsType)) { - assertThat(op.toString(), f, nullValue()); - return; - } - assertThat(op.toString(), f, not(nullValue())); - assertThat( - op.toString(), - f.message(), - equalTo( - String.format( - Locale.ROOT, - "first argument of [%s %s] is [%s] so second argument must also be [%s] but was [%s]", - lhsType.typeName(), - rhsType.typeName(), - lhsType.isNumeric() ? "numeric" : lhsType.typeName(), - lhsType.isNumeric() ? 
"numeric" : lhsType.typeName(), - rhsType.typeName() - ) - ) - ); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index f45dedff837c4..2ccd6fd5b8b93 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -10,18 +10,20 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - public class GreaterThanOrEqualTests extends AbstractFunctionTestCase { public GreaterThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -29,20 +31,109 @@ public GreaterThanOrEqualTests(@Name("TestCase") Supplier parameters() { - // ToDo: Add the full set of typed test cases here - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int >= Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() >= r.intValue(), + "GreaterThanOrEqualIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() >= r.longValue(), + "GreaterThanOrEqualLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() >= r.doubleValue(), + "GreaterThanOrEqualDoublesEvaluator" + ) ), - "GreaterThanOrEqualIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanOrEqualLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) >= 0, DataTypes.BOOLEAN, - equalTo(lhs >= rhs) - ); - }))); + 
TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanOrEqualKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanOrEqualKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanOrEqualLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((Number) l).longValue() >= ((Number) r).longValue(), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) >= 0, + (lhsType, rhsType) -> "GreaterThanOrEqualKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index e872af5b7c772..43408396ea8d0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -10,18 +10,20 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - public class GreaterThanTests extends AbstractFunctionTestCase { public GreaterThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -29,20 +31,109 @@ public GreaterThanTests(@Name("TestCase") Supplier te @ParametersFactory public static Iterable parameters() { - // ToDo: Add the full set of typed test cases here - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int > Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new 
TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() > r.intValue(), + "GreaterThanIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() > r.longValue(), + "GreaterThanLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() > r.doubleValue(), + "GreaterThanDoublesEvaluator" + ) ), - "GreaterThanIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) > 0, DataTypes.BOOLEAN, - equalTo(lhs > rhs) - ); - }))); + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? 
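/*
 * A likely answer to the TODO above: ESQL models datetime values as
 * epoch-millisecond longs, so the longs evaluator can compare two datetimes
 * directly - no cast is required, only a reinterpretation. Illustrative
 * sketch only:
 *
 *     long t1 = java.time.Instant.parse("2024-04-18T00:00:00Z").toEpochMilli();
 *     long t2 = java.time.Instant.parse("2024-04-19T00:00:00Z").toEpochMilli();
 *     boolean later = t2 > t1; // true, matching what GreaterThanLongsEvaluator computes
 */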
+ suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "GreaterThanLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((Number) l).longValue() > ((Number) r).longValue(), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) > 0, + (lhsType, rhsType) -> "GreaterThanKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 8bba0c4a5afb5..ba2c52d8e873a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -10,18 +10,20 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - public class LessThanOrEqualTests extends AbstractFunctionTestCase { public LessThanOrEqualTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -29,19 +31,109 @@ public LessThanOrEqualTests(@Name("TestCase") Supplier parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int <= Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() <= r.intValue(), + "LessThanOrEqualIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() <= r.longValue(), + "LessThanOrEqualLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() <= r.doubleValue(), + "LessThanOrEqualDoublesEvaluator" + ) ), - 
"LessThanOrEqualIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanOrEqualLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) <= 0, DataTypes.BOOLEAN, - equalTo(lhs <= rhs) - ); - }))); + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanOrEqualKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanOrEqualKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanOrEqualLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((Number) l).longValue() <= ((Number) r).longValue(), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) <= 0, + (lhsType, rhsType) -> "LessThanOrEqualKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index ab726dc51fbe4..62d59e5972caa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -10,18 +10,20 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import java.math.BigInteger; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static 
org.hamcrest.Matchers.equalTo; - public class LessThanTests extends AbstractFunctionTestCase { public LessThanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -29,19 +31,109 @@ public LessThanTests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int < Int", () -> { - int rhs = randomInt(); - int lhs = randomInt(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() < r.intValue(), + "LessThanIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() < r.longValue(), + "LessThanLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> l.doubleValue() < r.doubleValue(), + "LessThanDoublesEvaluator" + ) ), - "LessThanIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + "lhs", + "rhs", + (lhs, rhs) -> List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BigInteger) l).compareTo((BigInteger) r) < 0, DataTypes.BOOLEAN, - equalTo(lhs < rhs) - ); - }))); + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, + DataTypes.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanKeywordsEvaluator", + "lhs", + "rhs", + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, + DataTypes.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + // TODO: I'm surprised this passes. Shouldn't there be a cast from DateTime to Long? 
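/*
 * On the "NB: this has different behavior than Double::equals" comments in
 * the numeric configs above: primitive double comparisons and Double#equals
 * disagree on the two IEEE-754 edge cases, which is why the expected values
 * are computed with the comparison operators themselves rather than
 * equals-based logic:
 *
 *     Double.valueOf(Double.NaN).equals(Double.NaN); // true
 *     Double.NaN == Double.NaN;                      // false - NaN is unordered
 *     Double.valueOf(0.0).equals(-0.0);              // false
 *     0.0 == -0.0;                                   // true
 */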
+ suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "LessThanLongsEvaluator", + "lhs", + "rhs", + (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(), + DataTypes.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.stringCases( + (l, r) -> ((BytesRef) l).compareTo((BytesRef) r) < 0, + (lhsType, rhsType) -> "LessThanKeywordsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", + List.of(), + DataTypes.BOOLEAN + ) + ); + + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + ); } @Override From 8594954cdf1107d4ae6d18ecb440ced8fe06845b Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 18 Apr 2024 18:09:12 +0200 Subject: [PATCH 105/130] Wait correctly for search tasks to complete on node shutdown. (#107426) If using TimeValue.ZERO for `node.maximum_shutdown_grace_period`, nodes should wait indefinitely for search tasks to complete on shutdown. --- docs/changelog/107426.yaml | 5 +++++ server/src/main/java/org/elasticsearch/node/Node.java | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/107426.yaml diff --git a/docs/changelog/107426.yaml b/docs/changelog/107426.yaml new file mode 100644 index 0000000000000..2feed3df56108 --- /dev/null +++ b/docs/changelog/107426.yaml @@ -0,0 +1,5 @@ +pr: 107426 +summary: Support wait indefinitely for search tasks to complete on node shutdown +area: Infra/Node Lifecycle +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 801a2038fc06b..11eb8760b2dbb 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -155,7 +155,7 @@ public class Node implements Closeable { public static final Setting<TimeValue> MAXIMUM_SHUTDOWN_TIMEOUT_SETTING = Setting.positiveTimeSetting( "node.maximum_shutdown_grace_period", - TimeValue.timeValueMillis(0), + TimeValue.ZERO, Setting.Property.NodeScope ); @@ -617,7 +617,7 @@ public void prepareForClose() { CompletableFuture<Void> allStoppers = CompletableFuture.allOf(futures.values().toArray(new CompletableFuture<?>[stoppers.size()])); try { - if (maxTimeout.millis() == 0) { + if (TimeValue.ZERO.equals(maxTimeout)) { FutureUtils.get(allStoppers); } else { FutureUtils.get(allStoppers, maxTimeout.millis(), TimeUnit.MILLISECONDS); } @@ -652,7 +652,7 @@ private void awaitSearchTasksComplete(TimeValue asyncSearchTimeout) { // be spending on finishing those searches. final TimeValue pollPeriod = TimeValue.timeValueMillis(500); millisWaited += pollPeriod.millis(); - if (millisWaited >= asyncSearchTimeout.millis()) { + if (TimeValue.ZERO.equals(asyncSearchTimeout) == false && millisWaited >= asyncSearchTimeout.millis()) { logger.warn( format( "timed out after waiting [%s] for [%d] search tasks to finish", From ead585f9620e94819a6c83d6c953422ed0a311e0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 18 Apr 2024 12:14:49 -0400 Subject: [PATCH 106/130] ESQL: Add lookup flavor to `MultivalueDedupe` (#107624) The infrastructure that powers aggregation grouping uses a method called `MultivalueDedupe#hash` which deduplicates all the values in a row, adds them to the hash, and then returns all of the ordinals it added.
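To make the distinction concrete, here is a minimal sketch of the two behaviors (a toy model using a plain java.util.HashMap in place of the real LongHash and BlockHash classes; the names `hashAdd` and `hashLookup` mirror the methods described here, everything else is illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class DedupeSemanticsSketch {
    // hashAdd: an unseen value gets a fresh ordinal, growing the hash.
    static Integer hashAdd(Map<Long, Integer> hash, long value) {
        return hash.computeIfAbsent(value, v -> hash.size());
    }

    // hashLookup: an unseen value maps to null; the hash never grows.
    static Integer hashLookup(Map<Long, Integer> hash, long value) {
        return hash.get(value);
    }

    public static void main(String[] args) {
        Map<Long, Integer> hash = new HashMap<>();
        for (long v : new long[] { 10, 20, 10 }) {
            hashAdd(hash, v); // assigns ords 0 and 1; the duplicate 10 reuses ord 0
        }
        List<Integer> ords = new ArrayList<>();
        for (long v : new long[] { 20, 99 }) {
            ords.add(hashLookup(hash, v));
        }
        System.out.println(ords); // [1, null] - 99 was never added
    }
}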
This renames that method to `hashAdd` and adds another method `hashLookup` which performs the same deduplication and then looks the values up in the hash instead of adding them. It produces the same values as `hashAdd` would return, except when the hash doesn't contain the value - in that case it just adds `null` to the block of ordinals. Think of this as looking up what the aggregation ordinal would be, rather than building ordinals. I have big plans for this. --- .../operator/MultivalueDedupeBytesRef.java | 218 +++++++++++++-- .../operator/MultivalueDedupeDouble.java | 218 +++++++++++++-- .../compute/operator/MultivalueDedupeInt.java | 218 +++++++++++++-- .../operator/MultivalueDedupeLong.java | 218 +++++++++++++-- .../blockhash/BytesRefBlockHash.java | 3 +- .../blockhash/DoubleBlockHash.java | 16 +- .../aggregation/blockhash/IntBlockHash.java | 2 +- .../aggregation/blockhash/LongBlockHash.java | 2 +- .../blockhash/PackedValuesBlockHash.java | 6 +- .../operator/X-MultivalueDedupe.java.st | 261 ++++++++++++++++-- .../compute/data/BasicBlockTests.java | 35 +++ .../operator/MultivalueDedupeTests.java | 138 +++++++-- 12 files changed, 1213 insertions(+), 122 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java index 422f0bd65a28e..a7bac156057bd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java @@ -168,10 +168,11 @@ public BytesRefBlock sortToBlock(BlockFactory blockFactory, boolean ascending) { } /** - * Dedupe values and build a {@link IntBlock} suitable for passing - * as the grouping block to a {@link GroupingAggregatorFunction}. + * Dedupe values, add them to the hash, and build an {@link IntBlock} of + * their hashes. This block is suitable for passing as the grouping block + * to a {@link GroupingAggregatorFunction}. */ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, BytesRefHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, BytesRefHash hash) { try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { @@ -184,15 +185,15 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, BytesRefHash } case 1 -> { BytesRef v = block.getBytesRef(first, work[0]); - hash(builder, hash, v); + hashAdd(builder, hash, v); } default -> { if (count < ALWAYS_COPY_MISSING) { copyMissing(first, count); - hashUniquedWork(hash, builder); + hashAddUniquedWork(hash, builder); } else { copyAndSort(first, count); - hashSortedWork(hash, builder); + hashAddSortedWork(hash, builder); } } } @@ -201,6 +202,36 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, BytesRefHash } } + /** + * Dedupe values and build an {@link IntBlock} of their hashes. This block is + * suitable for passing as the grouping block to a {@link GroupingAggregatorFunction}. 
+ */ + public IntBlock hashLookup(BlockFactory blockFactory, BytesRefHash hash) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { + for (int p = 0; p < block.getPositionCount(); p++) { + int count = block.getValueCount(p); + int first = block.getFirstValueIndex(p); + switch (count) { + case 0 -> builder.appendInt(0); + case 1 -> { + BytesRef v = block.getBytesRef(first, work[0]); + hashLookupSingle(builder, hash, v); + } + default -> { + if (count < ALWAYS_COPY_MISSING) { + copyMissing(first, count); + hashLookupUniquedWork(hash, builder); + } else { + copyAndSort(first, count); + hashLookupSortedWork(hash, builder); + } + } + } + } + return builder.build(); + } + } + /** * Build a {@link BatchEncoder} which deduplicates values at each position * and then encodes the results into a {@link byte[]} which can be used for @@ -352,14 +383,14 @@ private void writeSortedWork(BytesRefBlock.Builder builder, boolean ascending) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); for (int i = 0; i < w; i++) { - hash(builder, hash, work[i]); + hashAdd(builder, hash, work[i]); } builder.endPositionEntry(); } @@ -367,23 +398,155 @@ private void hashUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(BytesRefHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(BytesRefHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); BytesRef prev = work[0]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); for (int i = 1; i < w; i++) { - if (false == prev.equals(work[i])) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); } } builder.endPositionEntry(); } + /** + * Looks up an already deduplicated {@link #work} to a hash. + */ + private void hashLookupUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + int i = 1; + long firstLookup = hashLookup(hash, work[0]); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + firstLookup = hashLookup(hash, work[i]); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + appendFound(builder, nextLookup); + } + i++; + } + builder.endPositionEntry(); + } + + /** + * Looks up a sorted {@link #work} to a hash, skipping duplicates. 
+ */ + private void hashLookupSortedWork(BytesRefHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + /* + * Step 1 - find the first unique value in the hash + * i will contain the next value to probe + * prev will contain the first value in the array contained in the hash + * firstLookup will contain the first value in the hash + */ + int i = 1; + BytesRef prev = work[0]; + long firstLookup = hashLookup(hash, prev); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + prev = work[i]; + firstLookup = hashLookup(hash, prev); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + appendFound(builder, nextLookup); + } + } + i++; + } + builder.endPositionEntry(); + } + /** * Writes a deduplicated {@link #work} to a {@link BatchEncoder.BytesRefs}. */ @@ -401,7 +564,7 @@ private void convertSortedWorkToUnique() { int end = w; w = 1; for (int i = 1; i < end; i++) { - if (false == prev.equals(work[i])) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; work[w].bytes = prev.bytes; work[w].offset = prev.offset; @@ -423,7 +586,28 @@ private void fillWork(int from, int to) { } } - private void hash(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { - builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); + private void hashAdd(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { + appendFound(builder, hash.add(v)); + } + + private long hashLookup(BytesRefHash hash, BytesRef v) { + return hash.find(v); + } + + private void hashLookupSingle(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { + long found = hashLookup(hash, v); + if (found >= 0) { + appendFound(builder, found); + } else { + builder.appendNull(); + } + } + + private void appendFound(IntBlock.Builder builder, long found) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(found))); + } + + private static boolean valuesEqual(BytesRef lhs, BytesRef rhs) { + return lhs.equals(rhs); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java index 49e515c62f13a..33af1178a183f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeDouble.java @@ -165,10 +165,11 @@ public DoubleBlock sortToBlock(BlockFactory blockFactory, boolean ascending) { } /** - * Dedupe values and build a {@link IntBlock} suitable for passing - * as the grouping block to a 
{@link GroupingAggregatorFunction}. + * Dedupe values, add them to the hash, and build an {@link IntBlock} of + * their hashes. This block is suitable for passing as the grouping block + * to a {@link GroupingAggregatorFunction}. */ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, LongHash hash) { try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { @@ -181,15 +182,15 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } case 1 -> { double v = block.getDouble(first); - hash(builder, hash, v); + hashAdd(builder, hash, v); } default -> { if (count < ALWAYS_COPY_MISSING) { copyMissing(first, count); - hashUniquedWork(hash, builder); + hashAddUniquedWork(hash, builder); } else { copyAndSort(first, count); - hashSortedWork(hash, builder); + hashAddSortedWork(hash, builder); } } } @@ -198,6 +199,36 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } } + /** + * Dedupe values and build an {@link IntBlock} of their hashes. This block is + * suitable for passing as the grouping block to a {@link GroupingAggregatorFunction}. + */ + public IntBlock hashLookup(BlockFactory blockFactory, LongHash hash) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { + for (int p = 0; p < block.getPositionCount(); p++) { + int count = block.getValueCount(p); + int first = block.getFirstValueIndex(p); + switch (count) { + case 0 -> builder.appendInt(0); + case 1 -> { + double v = block.getDouble(first); + hashLookupSingle(builder, hash, v); + } + default -> { + if (count < ALWAYS_COPY_MISSING) { + copyMissing(first, count); + hashLookupUniquedWork(hash, builder); + } else { + copyAndSort(first, count); + hashLookupSortedWork(hash, builder); + } + } + } + } + return builder.build(); + } + } + /** * Build a {@link BatchEncoder} which deduplicates values at each position * and then encodes the results into a {@link byte[]} which can be used for @@ -341,14 +372,14 @@ private void writeSortedWork(DoubleBlock.Builder builder, boolean ascending) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); for (int i = 0; i < w; i++) { - hash(builder, hash, work[i]); + hashAdd(builder, hash, work[i]); } builder.endPositionEntry(); } @@ -356,23 +387,155 @@ private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); double prev = work[0]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); for (int i = 1; i < w; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); } } builder.endPositionEntry(); } + /** + * Looks up an already deduplicated {@link #work} to a hash. 
+ */ + private void hashLookupUniquedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + int i = 1; + long firstLookup = hashLookup(hash, work[0]); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + firstLookup = hashLookup(hash, work[i]); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + appendFound(builder, nextLookup); + } + i++; + } + builder.endPositionEntry(); + } + + /** + * Looks up a sorted {@link #work} to a hash, skipping duplicates. + */ + private void hashLookupSortedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + /* + * Step 1 - find the first unique value in the hash + * i will contain the next value to probe + * prev will contain the first value in the array contained in the hash + * firstLookup will contain the first value in the hash + */ + int i = 1; + double prev = work[0]; + long firstLookup = hashLookup(hash, prev); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + prev = work[i]; + firstLookup = hashLookup(hash, prev); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + appendFound(builder, nextLookup); + } + } + i++; + } + builder.endPositionEntry(); + } + /** * Writes a deduplicated {@link #work} to a {@link BatchEncoder.Doubles}. 
*/ @@ -390,7 +553,7 @@ private void convertSortedWorkToUnique() { int end = w; w = 1; for (int i = 1; i < end; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; work[w++] = prev; } @@ -401,7 +564,28 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(IntBlock.Builder builder, LongHash hash, double v) { - builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v))))); + private void hashAdd(IntBlock.Builder builder, LongHash hash, double v) { + appendFound(builder, hash.add(Double.doubleToLongBits(v))); + } + + private long hashLookup(LongHash hash, double v) { + return hash.find(Double.doubleToLongBits(v)); + } + + private void hashLookupSingle(IntBlock.Builder builder, LongHash hash, double v) { + long found = hashLookup(hash, v); + if (found >= 0) { + appendFound(builder, found); + } else { + builder.appendNull(); + } + } + + private void appendFound(IntBlock.Builder builder, long found) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(found))); + } + + private static boolean valuesEqual(double lhs, double rhs) { + return lhs == rhs; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java index 086ec2b5ca4b7..76a607566d7d4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeInt.java @@ -165,10 +165,11 @@ public IntBlock sortToBlock(BlockFactory blockFactory, boolean ascending) { } /** - * Dedupe values and build a {@link IntBlock} suitable for passing - * as the grouping block to a {@link GroupingAggregatorFunction}. + * Dedupe values, add them to the hash, and build an {@link IntBlock} of + * their hashes. This block is suitable for passing as the grouping block + * to a {@link GroupingAggregatorFunction}. */ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, LongHash hash) { try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { @@ -181,15 +182,15 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } case 1 -> { int v = block.getInt(first); - hash(builder, hash, v); + hashAdd(builder, hash, v); } default -> { if (count < ALWAYS_COPY_MISSING) { copyMissing(first, count); - hashUniquedWork(hash, builder); + hashAddUniquedWork(hash, builder); } else { copyAndSort(first, count); - hashSortedWork(hash, builder); + hashAddSortedWork(hash, builder); } } } @@ -198,6 +199,36 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } } + /** + * Dedupe values and build an {@link IntBlock} of their hashes. This block is + * suitable for passing as the grouping block to a {@link GroupingAggregatorFunction}. 
+ */ + public IntBlock hashLookup(BlockFactory blockFactory, LongHash hash) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { + for (int p = 0; p < block.getPositionCount(); p++) { + int count = block.getValueCount(p); + int first = block.getFirstValueIndex(p); + switch (count) { + case 0 -> builder.appendInt(0); + case 1 -> { + int v = block.getInt(first); + hashLookupSingle(builder, hash, v); + } + default -> { + if (count < ALWAYS_COPY_MISSING) { + copyMissing(first, count); + hashLookupUniquedWork(hash, builder); + } else { + copyAndSort(first, count); + hashLookupSortedWork(hash, builder); + } + } + } + } + return builder.build(); + } + } + /** * Build a {@link BatchEncoder} which deduplicates values at each position * and then encodes the results into a {@link byte[]} which can be used for @@ -341,14 +372,14 @@ private void writeSortedWork(IntBlock.Builder builder, boolean ascending) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); for (int i = 0; i < w; i++) { - hash(builder, hash, work[i]); + hashAdd(builder, hash, work[i]); } builder.endPositionEntry(); } @@ -356,23 +387,155 @@ private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); int prev = work[0]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); for (int i = 1; i < w; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); } } builder.endPositionEntry(); } + /** + * Looks up an already deduplicated {@link #work} to a hash. + */ + private void hashLookupUniquedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + int i = 1; + long firstLookup = hashLookup(hash, work[0]); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + firstLookup = hashLookup(hash, work[i]); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + appendFound(builder, nextLookup); + } + i++; + } + builder.endPositionEntry(); + } + + /** + * Looks up a sorted {@link #work} to a hash, skipping duplicates. 
+ */ + private void hashLookupSortedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + /* + * Step 1 - find the first unique value in the hash + * i will contain the next value to probe + * prev will contain the first value in the array contained in the hash + * firstLookup will contain the first value in the hash + */ + int i = 1; + int prev = work[0]; + long firstLookup = hashLookup(hash, prev); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + prev = work[i]; + firstLookup = hashLookup(hash, prev); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + appendFound(builder, nextLookup); + } + } + i++; + } + builder.endPositionEntry(); + } + /** * Writes a deduplicated {@link #work} to a {@link BatchEncoder.Ints}. */ @@ -390,7 +553,7 @@ private void convertSortedWorkToUnique() { int end = w; w = 1; for (int i = 1; i < end; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; work[w++] = prev; } @@ -401,7 +564,28 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(IntBlock.Builder builder, LongHash hash, int v) { - builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); + private void hashAdd(IntBlock.Builder builder, LongHash hash, int v) { + appendFound(builder, hash.add(v)); + } + + private long hashLookup(LongHash hash, int v) { + return hash.find(v); + } + + private void hashLookupSingle(IntBlock.Builder builder, LongHash hash, int v) { + long found = hashLookup(hash, v); + if (found >= 0) { + appendFound(builder, found); + } else { + builder.appendNull(); + } + } + + private void appendFound(IntBlock.Builder builder, long found) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(found))); + } + + private static boolean valuesEqual(int lhs, int rhs) { + return lhs == rhs; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java index 0eefb1b7e96a2..c2349301b0ba8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/MultivalueDedupeLong.java @@ -166,10 +166,11 @@ public LongBlock sortToBlock(BlockFactory blockFactory, boolean ascending) { } /** - * Dedupe values and build a {@link IntBlock} suitable for passing - * as the grouping block to a {@link GroupingAggregatorFunction}. 
+ * Dedupe values, add them to the hash, and build an {@link IntBlock} of + * their hashes. This block is suitable for passing as the grouping block + * to a {@link GroupingAggregatorFunction}. */ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, LongHash hash) { try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { boolean sawNull = false; for (int p = 0; p < block.getPositionCount(); p++) { @@ -182,15 +183,15 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } case 1 -> { long v = block.getLong(first); - hash(builder, hash, v); + hashAdd(builder, hash, v); } default -> { if (count < ALWAYS_COPY_MISSING) { copyMissing(first, count); - hashUniquedWork(hash, builder); + hashAddUniquedWork(hash, builder); } else { copyAndSort(first, count); - hashSortedWork(hash, builder); + hashAddSortedWork(hash, builder); } } } @@ -199,6 +200,36 @@ public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash } } + /** + * Dedupe values and build an {@link IntBlock} of their hashes. This block is + * suitable for passing as the grouping block to a {@link GroupingAggregatorFunction}. + */ + public IntBlock hashLookup(BlockFactory blockFactory, LongHash hash) { + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { + for (int p = 0; p < block.getPositionCount(); p++) { + int count = block.getValueCount(p); + int first = block.getFirstValueIndex(p); + switch (count) { + case 0 -> builder.appendInt(0); + case 1 -> { + long v = block.getLong(first); + hashLookupSingle(builder, hash, v); + } + default -> { + if (count < ALWAYS_COPY_MISSING) { + copyMissing(first, count); + hashLookupUniquedWork(hash, builder); + } else { + copyAndSort(first, count); + hashLookupSortedWork(hash, builder); + } + } + } + } + return builder.build(); + } + } + /** * Build a {@link BatchEncoder} which deduplicates values at each position * and then encodes the results into a {@link byte[]} which can be used for @@ -342,14 +373,14 @@ private void writeSortedWork(LongBlock.Builder builder, boolean ascending) { /** * Writes an already deduplicated {@link #work} to a hash. */ - private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); for (int i = 0; i < w; i++) { - hash(builder, hash, work[i]); + hashAdd(builder, hash, work[i]); } builder.endPositionEntry(); } @@ -357,23 +388,155 @@ private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { /** * Writes a sorted {@link #work} to a hash, skipping duplicates. */ - private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(LongHash hash, IntBlock.Builder builder) { if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); long prev = work[0]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); for (int i = 1; i < w; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); } } builder.endPositionEntry(); } + /** + * Looks up an already deduplicated {@link #work} to a hash. 
+ */ + private void hashLookupUniquedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + int i = 1; + long firstLookup = hashLookup(hash, work[0]); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + firstLookup = hashLookup(hash, work[i]); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + appendFound(builder, nextLookup); + } + i++; + } + builder.endPositionEntry(); + } + + /** + * Looks up a sorted {@link #work} to a hash, skipping duplicates. + */ + private void hashLookupSortedWork(LongHash hash, IntBlock.Builder builder) { + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + /* + * Step 1 - find the first unique value in the hash + * i will contain the next value to probe + * prev will contain the first value in the array contained in the hash + * firstLookup will contain the first value in the hash + */ + int i = 1; + long prev = work[0]; + long firstLookup = hashLookup(hash, prev); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + prev = work[i]; + firstLookup = hashLookup(hash, prev); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + appendFound(builder, nextLookup); + } + } + i++; + } + builder.endPositionEntry(); + } + /** * Writes a deduplicated {@link #work} to a {@link BatchEncoder.Longs}. 
*/ @@ -391,7 +554,7 @@ private void convertSortedWorkToUnique() { int end = w; w = 1; for (int i = 1; i < end; i++) { - if (prev != work[i]) { + if (false == valuesEqual(prev, work[i])) { prev = work[i]; work[w++] = prev; } @@ -402,7 +565,28 @@ private void grow(int size) { work = ArrayUtil.grow(work, size); } - private void hash(IntBlock.Builder builder, LongHash hash, long v) { - builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v)))); + private void hashAdd(IntBlock.Builder builder, LongHash hash, long v) { + appendFound(builder, hash.add(v)); + } + + private long hashLookup(LongHash hash, long v) { + return hash.find(v); + } + + private void hashLookupSingle(IntBlock.Builder builder, LongHash hash, long v) { + long found = hashLookup(hash, v); + if (found >= 0) { + appendFound(builder, found); + } else { + builder.appendNull(); + } + } + + private void appendFound(IntBlock.Builder builder, long found) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(found))); + } + + private static boolean valuesEqual(long lhs, long rhs) { + return lhs == rhs; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index f368852ef78fb..8ab1e0aaf23dd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -86,8 +86,7 @@ private IntVector add(BytesRefVector vector) { } private IntBlock add(BytesRefBlock block) { - // TODO: use block factory - MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hash(blockFactory, bytesRefHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hashAdd(blockFactory, bytesRefHash); seenNull |= result.sawNull(); return result.ords(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index fe15a21a4beb0..b6ce0ebbe81de 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -80,11 +80,25 @@ private IntVector add(DoubleVector vector) { } private IntBlock add(DoubleBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hash(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hashAdd(blockFactory, longHash); seenNull |= result.sawNull(); return result.ords(); } + private IntVector lookup(DoubleVector vector) { + int positions = vector.getPositionCount(); + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i)))))); + } + return builder.build(); + } + } + + private IntBlock lookup(DoubleBlock block) { + return new MultivalueDedupeDouble(block).hashLookup(blockFactory, longHash); + } + @Override public DoubleBlock[] getKeys() { if (seenNull) { diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 47911c61fd704..935e9056e4b1a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -77,7 +77,7 @@ private IntVector add(IntVector vector) { } private IntBlock add(IntBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hash(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hashAdd(blockFactory, longHash); seenNull |= result.sawNull(); return result.ords(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 639d9cf48a515..440b5de829a92 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -80,7 +80,7 @@ private IntVector add(LongVector vector) { } private IntBlock add(LongBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hash(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hashAdd(blockFactory, longHash); seenNull |= result.sawNull(); return result.ords(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index 1e6a6b790bba8..a256f09c188f1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -106,9 +106,9 @@ class AddWork extends LongLongBlockHash.AbstractAddBlock { } /** - * Encodes one permutation of the keys at time into {@link #bytes}. The encoding is - * mostly provided by {@link BatchEncoder} with nulls living in a bit mask at the - * front of the bytes. + * Encodes one permutation of the keys at time into {@link #bytes} and adds it + * to the {@link #bytesRefHash}. The encoding is mostly provided by + * {@link BatchEncoder} with nulls living in a bit mask at the front of the bytes. */ void add() { for (position = 0; position < positionCount; position++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st index f1086489cc07a..7f33dbd4662fc 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st @@ -206,13 +206,14 @@ $endif$ } /** - * Dedupe values and build a {@link IntBlock} suitable for passing - * as the grouping block to a {@link GroupingAggregatorFunction}. 
+ * Dedupe values, add them to the hash, and build an {@link IntBlock} of + * their hashes. This block is suitable for passing as the grouping block + * to a {@link GroupingAggregatorFunction}. */ $if(BytesRef)$ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, BytesRefHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, BytesRefHash hash) { $else$ - public MultivalueDedupe.HashResult hash(BlockFactory blockFactory, LongHash hash) { + public MultivalueDedupe.HashResult hashAdd(BlockFactory blockFactory, LongHash hash) { $endif$ try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { boolean sawNull = false; @@ -230,15 +231,15 @@ $if(BytesRef)$ $else$ $type$ v = block.get$Type$(first); $endif$ - hash(builder, hash, v); + hashAdd(builder, hash, v); } default -> { if (count < ALWAYS_COPY_MISSING) { copyMissing(first, count); - hashUniquedWork(hash, builder); + hashAddUniquedWork(hash, builder); } else { copyAndSort(first, count); - hashSortedWork(hash, builder); + hashAddSortedWork(hash, builder); } } } @@ -247,6 +248,44 @@ $endif$ } } + /** + * Dedupe values and build an {@link IntBlock} of their hashes. This block is + * suitable for passing as the grouping block to a {@link GroupingAggregatorFunction}. + */ +$if(BytesRef)$ + public IntBlock hashLookup(BlockFactory blockFactory, BytesRefHash hash) { +$else$ + public IntBlock hashLookup(BlockFactory blockFactory, LongHash hash) { +$endif$ + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(block.getPositionCount())) { + for (int p = 0; p < block.getPositionCount(); p++) { + int count = block.getValueCount(p); + int first = block.getFirstValueIndex(p); + switch (count) { + case 0 -> builder.appendInt(0); + case 1 -> { +$if(BytesRef)$ + BytesRef v = block.getBytesRef(first, work[0]); +$else$ + $type$ v = block.get$Type$(first); +$endif$ + hashLookupSingle(builder, hash, v); + } + default -> { + if (count < ALWAYS_COPY_MISSING) { + copyMissing(first, count); + hashLookupUniquedWork(hash, builder); + } else { + copyAndSort(first, count); + hashLookupSortedWork(hash, builder); + } + } + } + } + return builder.build(); + } + } + /** * Build a {@link BatchEncoder} which deduplicates values at each position * and then encodes the results into a {@link byte[]} which can be used for @@ -434,17 +473,17 @@ $endif$ * Writes an already deduplicated {@link #work} to a hash. */ $if(BytesRef)$ - private void hashUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { $else$ - private void hashUniquedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddUniquedWork(LongHash hash, IntBlock.Builder builder) { $endif$ if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); for (int i = 0; i < w; i++) { - hash(builder, hash, work[i]); + hashAdd(builder, hash, work[i]); } builder.endPositionEntry(); } @@ -453,26 +492,162 @@ $endif$ * Writes a sorted {@link #work} to a hash, skipping duplicates. 
*/ $if(BytesRef)$ - private void hashSortedWork(BytesRefHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(BytesRefHash hash, IntBlock.Builder builder) { $else$ - private void hashSortedWork(LongHash hash, IntBlock.Builder builder) { + private void hashAddSortedWork(LongHash hash, IntBlock.Builder builder) { $endif$ if (w == 1) { - hash(builder, hash, work[0]); + hashAdd(builder, hash, work[0]); return; } builder.beginPositionEntry(); $type$ prev = work[0]; - hash(builder, hash, prev); + hashAdd(builder, hash, prev); for (int i = 1; i < w; i++) { + if (false == valuesEqual(prev, work[i])) { + prev = work[i]; + hashAdd(builder, hash, prev); + } + } + builder.endPositionEntry(); + } + + /** + * Looks up an already deduplicated {@link #work} to a hash. + */ $if(BytesRef)$ - if (false == prev.equals(work[i])) { + private void hashLookupUniquedWork(BytesRefHash hash, IntBlock.Builder builder) { $else$ - if (prev != work[i]) { + private void hashLookupUniquedWork(LongHash hash, IntBlock.Builder builder) { $endif$ - prev = work[i]; - hash(builder, hash, prev); + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + int i = 1; + long firstLookup = hashLookup(hash, work[0]); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; } + firstLookup = hashLookup(hash, work[i]); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + appendFound(builder, nextLookup); + } + i++; + } + builder.endPositionEntry(); + } + + /** + * Looks up a sorted {@link #work} to a hash, skipping duplicates. 
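+     * Values not found in the hash are skipped; if none of a position's values
+     * are present in the hash, the position produces a null group id.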
+ */ +$if(BytesRef)$ + private void hashLookupSortedWork(BytesRefHash hash, IntBlock.Builder builder) { +$else$ + private void hashLookupSortedWork(LongHash hash, IntBlock.Builder builder) { +$endif$ + if (w == 1) { + hashLookupSingle(builder, hash, work[0]); + return; + } + + /* + * Step 1 - find the first unique value in the hash + * i will contain the next value to probe + * prev will contain the first value in the array contained in the hash + * firstLookup will contain the first value in the hash + */ + int i = 1; + $type$ prev = work[0]; + long firstLookup = hashLookup(hash, prev); + while (firstLookup < 0) { + if (i >= w) { + // Didn't find any values + builder.appendNull(); + return; + } + prev = work[i]; + firstLookup = hashLookup(hash, prev); + i++; + } + + /* + * Step 2 - find the next unique value in the hash + */ + boolean foundSecond = false; + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + builder.beginPositionEntry(); + appendFound(builder, firstLookup); + appendFound(builder, nextLookup); + i++; + foundSecond = true; + break; + } + } + i++; + } + + /* + * Step 3a - we didn't find a second value, just emit the first one + */ + if (false == foundSecond) { + appendFound(builder, firstLookup); + return; + } + + /* + * Step 3b - we found a second value, search for more + */ + while (i < w) { + if (false == valuesEqual(prev, work[i])) { + long nextLookup = hashLookup(hash, work[i]); + if (nextLookup >= 0) { + prev = work[i]; + appendFound(builder, nextLookup); + } + } + i++; } builder.endPositionEntry(); } @@ -494,8 +669,8 @@ $endif$ int end = w; w = 1; for (int i = 1; i < end; i++) { + if (false == valuesEqual(prev, work[i])) { $if(BytesRef)$ - if (false == prev.equals(work[i])) { prev = work[i]; work[w].bytes = prev.bytes; work[w].offset = prev.offset; @@ -503,7 +678,6 @@ $if(BytesRef)$ w++; } $else$ - if (prev != work[i]) { prev = work[i]; work[w++] = prev; } @@ -530,14 +704,51 @@ $if(BytesRef)$ $endif$ $if(BytesRef)$ - private void hash(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { + private void hashAdd(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { $else$ - private void hash(IntBlock.Builder builder, LongHash hash, $type$ v) { + private void hashAdd(IntBlock.Builder builder, LongHash hash, $type$ v) { $endif$ $if(double)$ - builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(Double.doubleToLongBits(v))))); + appendFound(builder, hash.add(Double.doubleToLongBits(v))); +$else$ + appendFound(builder, hash.add(v)); +$endif$ + } + +$if(BytesRef)$ + private long hashLookup(BytesRefHash hash, BytesRef v) { +$else$ + private long hashLookup(LongHash hash, $type$ v) { +$endif$ +$if(double)$ + return hash.find(Double.doubleToLongBits(v)); +$else$ + return hash.find(v); +$endif$ + } + +$if(BytesRef)$ + private void hashLookupSingle(IntBlock.Builder builder, BytesRefHash hash, BytesRef v) { +$else$ + private void hashLookupSingle(IntBlock.Builder builder, LongHash hash, $type$ v) { +$endif$ + long found = hashLookup(hash, v); + if (found >= 0) { + appendFound(builder, found); + } else { + builder.appendNull(); + } + } + + private void appendFound(IntBlock.Builder builder, long found) { + builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(found))); + } + + private static boolean valuesEqual($type$ lhs, $type$ rhs) { +$if(BytesRef)$ + return lhs.equals(rhs); $else$ - 
builder.appendInt(Math.toIntExact(BlockHash.hashOrdToGroupNullReserved(hash.add(v))));
+    return lhs == rhs;
$endif$
    }
 }
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java
index 36c7a3f178282..ee505704f762b 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java
@@ -861,6 +861,41 @@ public record RandomBlock(List<List<Object>> values, Block block) {
        int valueCount() {
            return values.stream().mapToInt(l -> l == null ? 0 : l.size()).sum();
        }
+
+        /**
+         * Build a {@link RandomBlock} containing the values of two blocks, preserving the relative order.
+         */
+        public BasicBlockTests.RandomBlock merge(BasicBlockTests.RandomBlock rhs) {
+            int estimatedSize = values().size() + rhs.values().size();
+            int l = 0;
+            int r = 0;
+            List<List<Object>> mergedValues = new ArrayList<>(estimatedSize);
+            try (Block.Builder mergedBlock = block.elementType().newBlockBuilder(estimatedSize, block.blockFactory())) {
+                while (l < values.size() && r < rhs.values.size()) {
+                    if (randomBoolean()) {
+                        mergedValues.add(values.get(l));
+                        mergedBlock.copyFrom(block, l, l + 1);
+                        l++;
+                    } else {
+                        mergedValues.add(rhs.values.get(r));
+                        mergedBlock.copyFrom(rhs.block, r, r + 1);
+                        r++;
+                    }
+                }
+                while (l < values.size()) {
+                    mergedValues.add(values.get(l));
+                    mergedBlock.copyFrom(block, l, l + 1);
+                    l++;
+                }
+                while (r < rhs.values.size()) {
+                    mergedValues.add(rhs.values.get(r));
+                    mergedBlock.copyFrom(rhs.block, r, r + 1);
+                    r++;
+                }
+                return new BasicBlockTests.RandomBlock(mergedValues, mergedBlock.build());
+            }
+        }
+
    }

    public static RandomBlock randomBlock(
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
index 87577612068ab..cd23e54b1eed1 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
@@ -40,6 +40,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.NavigableSet;
+import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.function.LongFunction;
@@ -73,12 +74,13 @@ private static boolean oneOf(ElementType elementType, ElementType...
others) { return false; } - @ParametersFactory + @ParametersFactory(argumentFormatting = "elementType=%s positions=%s nullsAllowed=%s valuesPerPosition=%s-%s dupsPerPosition=%s-%s") public static List params() { List params = new ArrayList<>(); for (ElementType elementType : supportedTypes()) { for (boolean nullAllowed : new boolean[] { false, true }) { for (int max : new int[] { 10, 100, 1000 }) { + params.add(new Object[] { elementType, 1000, nullAllowed, 1, 1, 0, 0 }); params.add(new Object[] { elementType, 1000, nullAllowed, 1, max, 0, 0 }); params.add(new Object[] { elementType, 1000, nullAllowed, 1, max, 0, 100 }); } @@ -294,50 +296,111 @@ private void assertBooleanHash(Set previousValues, BasicBlockTests.Rand private void assertBytesRefHash(Set previousValues, BasicBlockTests.RandomBlock b) { BytesRefHash hash = new BytesRefHash(1, BigArrays.NON_RECYCLING_INSTANCE); - previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeBytesRef((BytesRefBlock) b.block()).hash(blockFactory(), hash); + previousValues.forEach(hash::add); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeBytesRef((BytesRefBlock) b.block()).hashAdd(blockFactory(), hash); try (IntBlock ords = hashes.ords()) { - assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); assertHash(b, ords, hash.size(), previousValues, i -> hash.get(i, new BytesRef())); + long sizeBeforeLookup = hash.size(); + try (IntBlock lookup = new MultivalueDedupeBytesRef((BytesRefBlock) b.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, b, lookup, i -> hash.get(i, new BytesRef())); + } + BasicBlockTests.RandomBlock other = randomBlock(); + if (randomBoolean()) { + other = b.merge(other); + } + try (IntBlock lookup = new MultivalueDedupeBytesRef((BytesRefBlock) other.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, other, lookup, i -> hash.get(i, new BytesRef())); + } + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); } } private void assertIntHash(Set previousValues, BasicBlockTests.RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); - previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeInt((IntBlock) b.block()).hash(blockFactory(), hash); + previousValues.forEach(hash::add); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeInt((IntBlock) b.block()).hashAdd(blockFactory(), hash); try (IntBlock ords = hashes.ords()) { - assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); assertHash(b, ords, hash.size(), previousValues, i -> (int) hash.get(i)); + long sizeBeforeLookup = hash.size(); + try (IntBlock lookup = new MultivalueDedupeInt((IntBlock) b.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, b, lookup, i -> (int) hash.get(i)); + } + BasicBlockTests.RandomBlock other = randomBlock(); + if (randomBoolean()) { + other = b.merge(other); + } + try (IntBlock lookup = new MultivalueDedupeInt((IntBlock) other.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, other, lookup, i -> (int) 
hash.get(i)); + } + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); } } private void assertLongHash(Set previousValues, BasicBlockTests.RandomBlock b) { - LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); - previousValues.stream().forEach(hash::add); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeLong((LongBlock) b.block()).hash(blockFactory(), hash); - try (IntBlock ords = hashes.ords()) { - assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); - assertHash(b, ords, hash.size(), previousValues, i -> hash.get(i)); + try (LongHash hash = new LongHash(1, blockFactory().bigArrays())) { + previousValues.forEach(hash::add); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeLong((LongBlock) b.block()).hashAdd(blockFactory(), hash); + try (IntBlock ords = hashes.ords()) { + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); + assertHash(b, ords, hash.size(), previousValues, hash::get); + long sizeBeforeLookup = hash.size(); + try (IntBlock lookup = new MultivalueDedupeLong((LongBlock) b.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, b, lookup, hash::get); + } + BasicBlockTests.RandomBlock other = randomBlock(); + if (randomBoolean()) { + other = b.merge(other); + } + try (IntBlock lookup = new MultivalueDedupeLong((LongBlock) other.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, other, lookup, hash::get); + } + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); + } } } private void assertDoubleHash(Set previousValues, BasicBlockTests.RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); - previousValues.stream().forEach(d -> hash.add(Double.doubleToLongBits(d))); - MultivalueDedupe.HashResult hashes = new MultivalueDedupeDouble((DoubleBlock) b.block()).hash(blockFactory(), hash); + previousValues.forEach(d -> hash.add(Double.doubleToLongBits(d))); + MultivalueDedupe.HashResult hashes = new MultivalueDedupeDouble((DoubleBlock) b.block()).hashAdd(blockFactory(), hash); try (IntBlock ords = hashes.ords()) { - assertThat(hashes.sawNull(), equalTo(b.values().stream().anyMatch(v -> v == null))); + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); assertHash(b, ords, hash.size(), previousValues, i -> Double.longBitsToDouble(hash.get(i))); + long sizeBeforeLookup = hash.size(); + try (IntBlock lookup = new MultivalueDedupeDouble((DoubleBlock) b.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, b, lookup, i -> Double.longBitsToDouble(hash.get(i))); + } + BasicBlockTests.RandomBlock other = randomBlock(); + if (randomBoolean()) { + other = b.merge(other); + } + try (IntBlock lookup = new MultivalueDedupeDouble((DoubleBlock) other.block()).hashLookup(blockFactory(), hash)) { + assertThat(hash.size(), equalTo(sizeBeforeLookup)); + assertLookup(previousValues, b, other, lookup, i -> Double.longBitsToDouble(hash.get(i))); + } + assertThat(hashes.sawNull(), equalTo(shouldHaveSeenNull(b))); } } + private Boolean shouldHaveSeenNull(BasicBlockTests.RandomBlock b) { + return b.values().stream().anyMatch(Objects::isNull); + } + private void assertHash( BasicBlockTests.RandomBlock b, IntBlock hashes, long hashSize, Set previousValues, - LongFunction lookup + LongFunction valueLookup ) { Set allValues = new HashSet<>(); 
allValues.addAll(previousValues);
@@ -351,22 +414,55 @@ private void assertHash(
                assertThat(hashes.getInt(start), equalTo(0));
                return;
            }
-            List<Object> actualValues = new ArrayList<>(count);
+            Set<Object> actualValues = new TreeSet<>();
            int end = start + count;
            for (int i = start; i < end; i++) {
-                actualValues.add(lookup.apply(hashes.getInt(i) - 1));
+                actualValues.add(valueLookup.apply(hashes.getInt(i) - 1));
            }
-            assertThat(new HashSet<>(actualValues), containsInAnyOrder(new HashSet<>(v).toArray()));
+            assertThat(actualValues, equalTo(new TreeSet<>(v)));
            allValues.addAll(v);
        }

        Set<Object> hashedValues = new HashSet<>((int) hashSize);
        for (long i = 0; i < hashSize; i++) {
-            hashedValues.add(lookup.apply(i));
+            hashedValues.add(valueLookup.apply(i));
        }
        assertThat(hashedValues, equalTo(allValues));
    }

+    private void assertLookup(
+        Set<Object> previousValues,
+        BasicBlockTests.RandomBlock hashed,
+        BasicBlockTests.RandomBlock lookedUp,
+        IntBlock lookup,
+        LongFunction<Object> valueLookup
+    ) {
+        Set<Object> contained = new HashSet<>(previousValues);
+        for (List<Object> values : hashed.values()) {
+            if (values != null) {
+                contained.addAll(values);
+            }
+        }
+        for (int p = 0; p < lookedUp.block().getPositionCount(); p++) {
+            int count = lookup.getValueCount(p);
+            int start = lookup.getFirstValueIndex(p);
+            List<Object> v = lookedUp.values().get(p);
+            if (v == null) {
+                assertThat(count, equalTo(1));
+                assertThat(lookup.getInt(start), equalTo(0));
+                return;
+            }
+            Set<Object> actualValues = new TreeSet<>();
+            int end = start + count;
+            for (int i = start; i < end; i++) {
+                actualValues.add(valueLookup.apply(lookup.getInt(i) - 1));
+            }
+            assertThat(actualValues, equalTo(v.stream().filter(contained::contains).collect(Collectors.toCollection(TreeSet::new))));
+        }
+    }
+
    private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder encoder, int position, int offset, int valueOffset) {
        List<Object> expected = b.values().get(position);
        if (expected == null) {

From bc1262341d0b159f1fdc73df6a567e55d28d52de Mon Sep 17 00:00:00 2001
From: William Brafford
Date: Thu, 18 Apr 2024 12:57:57 -0400
Subject: [PATCH 107/130] Mute flaky blocked thread pool test (#107625)
 (#107630)

---
 .../org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java
index b33320539b57f..e105d61f7ee0a 100644
--- a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java
@@ -69,6 +69,7 @@ protected void runWithBlockedThreadPools(Runnable runnable) {
        }
    }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107625")
    public void testUserThreadPoolsAreBlocked() {
        assertAcked(client().admin().indices().prepareCreate(USER_INDEX));

From bb0bc17eced828a3ff4f20b1d346e9ec897022be Mon Sep 17 00:00:00 2001
From: Brian Seeders
Date: Thu, 18 Apr 2024 13:19:27 -0400
Subject: [PATCH 108/130] Muted test automation (#106784)

---
 build-tools-internal/muted-tests.yml | 24 +++
 .../internal/ElasticsearchJavaBasePlugin.java | 2 +
 .../gradle/internal/test/MutedTestPlugin.java | 41 +++++
 .../internal/test/MutedTestsBuildService.java | 141 ++++++++++++++++++
 .../fixtures/AbstractGradleFuncTest.groovy | 6 +
 5 files
changed, 214 insertions(+) create mode 100644 build-tools-internal/muted-tests.yml create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java diff --git a/build-tools-internal/muted-tests.yml b/build-tools-internal/muted-tests.yml new file mode 100644 index 0000000000000..341d127c7b64a --- /dev/null +++ b/build-tools-internal/muted-tests.yml @@ -0,0 +1,24 @@ +tests: +# Examples: +# +# Mute a single test case in a YAML test suite: +# - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT +# method: test {yaml=analysis-common/30_tokenizers/letter} +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute several methods of a Java test: +# - class: org.elasticsearch.common.CharArraysTests +# methods: +# - testCharsBeginsWith +# - testCharsToBytes +# - testConstantTimeEquals +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute an entire test class: +# - class: org.elasticsearch.common.unit.TimeValueTests +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute a single method in a test class: +# - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIPTests +# method: testCrankyEvaluateBlockWithoutNulls +# issue: https://github.com/elastic/elasticsearch/... diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index dbdb065858f43..f95d9d72a473f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitTaskPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin; +import org.elasticsearch.gradle.internal.test.MutedTestPlugin; import org.elasticsearch.gradle.internal.test.TestUtil; import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider; import org.elasticsearch.gradle.util.GradleUtils; @@ -62,6 +63,7 @@ public void apply(Project project) { project.getPluginManager().apply(RepositoriesSetupPlugin.class); project.getPluginManager().apply(ElasticsearchTestBasePlugin.class); project.getPluginManager().apply(PrecommitTaskPlugin.class); + project.getPluginManager().apply(MutedTestPlugin.class); configureConfigurations(project); configureCompile(project); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java new file mode 100644 index 0000000000000..4df99e7454f32 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */
+
+package org.elasticsearch.gradle.internal.test;
+
+import org.elasticsearch.gradle.internal.conventions.util.Util;
+import org.elasticsearch.gradle.internal.info.BuildParams;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.provider.Provider;
+import org.gradle.api.tasks.testing.Test;
+
+import java.io.File;
+
+public class MutedTestPlugin implements Plugin<Project> {
+    @Override
+    public void apply(Project project) {
+        File infoPath = new File(Util.locateElasticsearchWorkspace(project.getGradle()), "build-tools-internal");
+        Provider<MutedTestsBuildService> mutedTestsProvider = project.getGradle()
+            .getSharedServices()
+            .registerIfAbsent("mutedTests", MutedTestsBuildService.class, spec -> {
+                spec.getParameters().getInfoPath().set(infoPath);
+            });
+
+        project.getTasks().withType(Test.class).configureEach(test -> {
+            test.filter(filter -> {
+                for (String exclude : mutedTestsProvider.get().getExcludePatterns()) {
+                    filter.excludeTestsMatching(exclude);
+                }
+
+                // Don't fail when all tests are ignored when running in CI
+                filter.setFailOnNoMatchingTests(BuildParams.isCi() == false);
+            });
+        });
+    }
+}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java
new file mode 100644
index 0000000000000..9e4a92f26d4dd
--- /dev/null
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestsBuildService.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.test;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
+import org.gradle.api.file.RegularFileProperty;
+import org.gradle.api.services.BuildService;
+import org.gradle.api.services.BuildServiceParameters;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public abstract class MutedTestsBuildService implements BuildService<MutedTestsBuildService.Params> {
+    private final List<String> excludePatterns;
+
+    public MutedTestsBuildService() {
+        File infoPath = getParameters().getInfoPath().get().getAsFile();
+        File mutedTestsFile = new File(infoPath, "muted-tests.yml");
+        try (InputStream is = new BufferedInputStream(new FileInputStream(mutedTestsFile))) {
+            ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory());
+            List<MutedTest> mutedTests = objectMapper.readValue(is, MutedTests.class).getTests();
+            excludePatterns = buildExcludePatterns(mutedTests == null ?
Collections.emptyList() : mutedTests);
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+    }
+
+    public List<String> getExcludePatterns() {
+        return excludePatterns;
+    }
+
+    private static List<String> buildExcludePatterns(List<MutedTest> mutedTests) {
+        List<String> excludes = new ArrayList<>();
+        if (mutedTests.isEmpty() == false) {
+            for (MutedTestsBuildService.MutedTest mutedTest : mutedTests) {
+                if (mutedTest.getClassName() != null && mutedTest.getMethods().isEmpty() == false) {
+                    for (String method : mutedTest.getMethods()) {
+                        // Tests that use the randomized runner and parameters end up looking like this:
+                        // test {yaml=analysis-common/30_tokenizers/letter}
+                        // We need to detect this and handle them a little differently than non-parameterized tests, because of some
+                        // quirks in the randomized runner
+                        int index = method.indexOf(" {");
+                        String methodWithoutParams = index >= 0 ? method.substring(0, index) : method;
+                        String paramString = index >= 0 ? method.substring(index) : null;
+
+                        excludes.add(mutedTest.getClassName() + "." + method);
+
+                        if (paramString != null) {
+                            // Because of randomized runner quirks, we need to skip the test method by itself whenever we want to skip a test
+                            // that has parameters
+                            // This is because the runner has *two* separate checks that can cause the test to end up getting executed, so
+                            // we need filters that cover both checks
+                            excludes.add(mutedTest.getClassName() + "." + methodWithoutParams);
+                        } else {
+                            // We need to add the following, in case we're skipping an entire class of parameterized tests
+                            excludes.add(mutedTest.getClassName() + "." + method + " *");
+                        }
+                    }
+                } else if (mutedTest.getClassName() != null) {
+                    excludes.add(mutedTest.getClassName() + ".*");
+                }
+            }
+        }
+
+        return excludes;
+    }
+
+    public interface Params extends BuildServiceParameters {
+        RegularFileProperty getInfoPath();
+    }
+
+    public static class MutedTest {
+        private final String className;
+        private final String method;
+        private final List<String> methods;
+        private final String issue;
+
+        @JsonCreator
+        public MutedTest(
+            @JsonProperty("class") String className,
+            @JsonProperty("method") String method,
+            @JsonProperty("methods") List<String> methods,
+            @JsonProperty("issue") String issue
+        ) {
+            this.className = className;
+            this.method = method;
+            this.methods = methods;
+            this.issue = issue;
+        }
+
+        public List<String> getMethods() {
+            List<String> allMethods = new ArrayList<>();
+            if (methods != null) {
+                allMethods.addAll(methods);
+            }
+            if (method != null) {
+                allMethods.add(method);
+            }
+
+            return allMethods;
+        }
+
+        public String getClassName() {
+            return className;
+        }
+
+        public String getIssue() {
+            return issue;
+        }
+    }
+
+    private static class MutedTests {
+        private final List<MutedTest> tests;
+
+        @JsonCreator
+        MutedTests(@JsonProperty("tests") List<MutedTest> tests) {
+            this.tests = tests;
+        }
+
+        public List<MutedTest> getTests() {
+            return tests;
+        }
+    }
+}
diff --git a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
index 49e9427462195..2f1d1f5d36e87 100644
--- a/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
+++ b/build-tools/src/testFixtures/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy
@@ -21,6 +21,8 @@ import spock.lang.Specification
 import spock.lang.TempDir

 import java.lang.management.ManagementFactory
+import java.nio.file.Files
+import java.nio.file.Path
 import java.util.jar.JarEntry
import java.util.jar.JarOutputStream @@ -57,6 +59,10 @@ abstract class AbstractGradleFuncTest extends Specification { id 'base' } """ + def mutedTestsFile = Files.createFile(Path.of(testProjectDir.newFolder("build-tools-internal").path, "muted-tests.yml")) + mutedTestsFile << """ + tests: [] + """ } def cleanup() { From f620961812f968cee032c5313f0d12cd35e590cd Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 18 Apr 2024 14:09:30 -0400 Subject: [PATCH 109/130] [ESQL] Add in the autogenerated docs for a bunch of functions (#107633) --- .../esql/functions/description/pi.asciidoc | 2 +- .../esql/functions/description/round.asciidoc | 2 +- .../esql/functions/kibana/definition/pi.json | 2 +- .../functions/kibana/definition/round.json | 2 +- .../esql/functions/kibana/docs/acos.md | 2 +- .../esql/functions/kibana/docs/asin.md | 2 +- .../esql/functions/kibana/docs/atan.md | 2 +- .../esql/functions/kibana/docs/atan2.md | 2 +- .../esql/functions/kibana/docs/ceil.md | 2 +- .../esql/functions/kibana/docs/cos.md | 2 +- .../esql/functions/kibana/docs/cosh.md | 2 +- .../esql/functions/kibana/docs/floor.md | 2 +- .../esql/functions/kibana/docs/pi.md | 2 +- .../esql/functions/kibana/docs/round.md | 7 ++++--- .../esql/functions/kibana/docs/sin.md | 2 +- .../esql/functions/kibana/docs/sinh.md | 2 +- .../esql/functions/kibana/docs/tan.md | 2 +- .../esql/functions/kibana/docs/tanh.md | 2 +- .../esql/functions/types/div.asciidoc | 7 +++++++ .../esql/functions/types/equals.asciidoc | 21 +++++++++++++++++++ .../functions/types/greater_than.asciidoc | 16 ++++++++++++++ .../types/greater_than_or_equal.asciidoc | 16 ++++++++++++++ .../esql/functions/types/less_than.asciidoc | 16 ++++++++++++++ .../types/less_than_or_equal.asciidoc | 16 ++++++++++++++ .../esql/functions/types/mod.asciidoc | 7 +++++++ .../esql/functions/types/not_equals.asciidoc | 21 +++++++++++++++++++ 26 files changed, 141 insertions(+), 20 deletions(-) diff --git a/docs/reference/esql/functions/description/pi.asciidoc b/docs/reference/esql/functions/description/pi.asciidoc index 90930678ded9f..a0bed1285372e 100644 --- a/docs/reference/esql/functions/description/pi.asciidoc +++ b/docs/reference/esql/functions/description/pi.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. +Returns {wikipedia}/Pi[Pi], the ratio of a circle's circumference to its diameter. diff --git a/docs/reference/esql/functions/description/round.asciidoc b/docs/reference/esql/functions/description/round.asciidoc index aebed74b88485..9ac6bd8db6961 100644 --- a/docs/reference/esql/functions/description/round.asciidoc +++ b/docs/reference/esql/functions/description/round.asciidoc @@ -2,4 +2,4 @@ *Description* -Rounds a number to the closest number with the specified number of digits. Defaults to 0 digits if no number of digits is provided. If the specified number of digits is negative, rounds to the number of digits left of the decimal point. +Rounds a number to the specified number of decimal places. Defaults to 0, which returns the nearest integer. If the precision is a negative number, rounds to the number of digits left of the decimal point. 
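For example, assuming standard half-away-from-zero rounding (illustrative values,
with // marking ES|QL comments):

    ROUND(123.45)     // defaults to 0 decimal places -> 123.0
    ROUND(123.45, 1)  // one decimal place -> 123.5
    ROUND(123.45, -2) // negative precision, nearest hundred -> 100.0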
diff --git a/docs/reference/esql/functions/kibana/definition/pi.json b/docs/reference/esql/functions/kibana/definition/pi.json index 047067f852c80..e872762e2680f 100644 --- a/docs/reference/esql/functions/kibana/definition/pi.json +++ b/docs/reference/esql/functions/kibana/definition/pi.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "pi", - "description" : "Returns the ratio of a circle's circumference to its diameter.", + "description" : "Returns Pi, the ratio of a circle's circumference to its diameter.", "signatures" : [ { "params" : [ ], diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json index f8a0c0ce31165..4149f72db7ee6 100644 --- a/docs/reference/esql/functions/kibana/definition/round.json +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "round", - "description" : "Rounds a number to the closest number with the specified number of digits.\nDefaults to 0 digits if no number of digits is provided. If the specified number\nof digits is negative, rounds to the number of digits left of the decimal point.", + "description" : "Rounds a number to the specified number of decimal places.\nDefaults to 0, which returns the nearest integer. If the\nprecision is a negative number, rounds to the number of digits left\nof the decimal point.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/docs/acos.md b/docs/reference/esql/functions/kibana/docs/acos.md index 19ae2522d48b4..d9c05875631d9 100644 --- a/docs/reference/esql/functions/kibana/docs/acos.md +++ b/docs/reference/esql/functions/kibana/docs/acos.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ACOS -Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. +Returns the arccosine of `n` as an angle, expressed in radians. ``` ROW a=.9 diff --git a/docs/reference/esql/functions/kibana/docs/asin.md b/docs/reference/esql/functions/kibana/docs/asin.md index c072ac19b5b92..b26eee366351f 100644 --- a/docs/reference/esql/functions/kibana/docs/asin.md +++ b/docs/reference/esql/functions/kibana/docs/asin.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ASIN -Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input +Returns the arcsine of the input numeric expression as an angle, expressed in radians. ``` diff --git a/docs/reference/esql/functions/kibana/docs/atan.md b/docs/reference/esql/functions/kibana/docs/atan.md index 62686f2fbab2c..9be6e2dbefd11 100644 --- a/docs/reference/esql/functions/kibana/docs/atan.md +++ b/docs/reference/esql/functions/kibana/docs/atan.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ATAN -Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input +Returns the arctangent of the input numeric expression as an angle, expressed in radians. 
``` diff --git a/docs/reference/esql/functions/kibana/docs/atan2.md b/docs/reference/esql/functions/kibana/docs/atan2.md index 0000c532236d9..e3129264d6999 100644 --- a/docs/reference/esql/functions/kibana/docs/atan2.md +++ b/docs/reference/esql/functions/kibana/docs/atan2.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ATAN2 -The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the +The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. ``` diff --git a/docs/reference/esql/functions/kibana/docs/ceil.md b/docs/reference/esql/functions/kibana/docs/ceil.md index 812b139206c35..7c696cc46bdd1 100644 --- a/docs/reference/esql/functions/kibana/docs/ceil.md +++ b/docs/reference/esql/functions/kibana/docs/ceil.md @@ -9,4 +9,4 @@ Round a number up to the nearest integer. ROW a=1.8 | EVAL a=CEIL(a) ``` -Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. +Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to Math.ceil. diff --git a/docs/reference/esql/functions/kibana/docs/cos.md b/docs/reference/esql/functions/kibana/docs/cos.md index 9e8abebaddb89..c942ef8e1a49c 100644 --- a/docs/reference/esql/functions/kibana/docs/cos.md +++ b/docs/reference/esql/functions/kibana/docs/cos.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### COS -Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. +Returns the cosine of an angle. ``` ROW a=1.8 diff --git a/docs/reference/esql/functions/kibana/docs/cosh.md b/docs/reference/esql/functions/kibana/docs/cosh.md index b8fae70ae2eed..d5cc126650e44 100644 --- a/docs/reference/esql/functions/kibana/docs/cosh.md +++ b/docs/reference/esql/functions/kibana/docs/cosh.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### COSH -Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. +Returns the hyperbolic cosine of an angle. ``` ROW a=1.8 diff --git a/docs/reference/esql/functions/kibana/docs/floor.md b/docs/reference/esql/functions/kibana/docs/floor.md index 116edc2d15ba7..1f94e0eb74eb6 100644 --- a/docs/reference/esql/functions/kibana/docs/floor.md +++ b/docs/reference/esql/functions/kibana/docs/floor.md @@ -11,4 +11,4 @@ ROW a=1.8 ``` Note: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer -similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. +similar to Math.floor. diff --git a/docs/reference/esql/functions/kibana/docs/pi.md b/docs/reference/esql/functions/kibana/docs/pi.md index 3e179cbeae5d7..676d11cea3ea4 100644 --- a/docs/reference/esql/functions/kibana/docs/pi.md +++ b/docs/reference/esql/functions/kibana/docs/pi.md @@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### PI -Returns the {wikipedia}/Pi[ratio] of a circle's circumference to its diameter. +Returns Pi, the ratio of a circle's circumference to its diameter. 
```
ROW PI()
diff --git a/docs/reference/esql/functions/kibana/docs/round.md b/docs/reference/esql/functions/kibana/docs/round.md
index f6e7ef5314a77..97fa33a8845bd 100644
--- a/docs/reference/esql/functions/kibana/docs/round.md
+++ b/docs/reference/esql/functions/kibana/docs/round.md
@@ -3,9 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->

 ### ROUND
-Rounds a number to the closest number with the specified number of digits.
-Defaults to 0 digits if no number of digits is provided. If the specified number
-of digits is negative, rounds to the number of digits left of the decimal point.
+Rounds a number to the specified number of decimal places.
+Defaults to 0, which returns the nearest integer. If the
+precision is a negative number, rounds to the number of digits left
+of the decimal point.

 ```
 FROM employees
diff --git a/docs/reference/esql/functions/kibana/docs/sin.md b/docs/reference/esql/functions/kibana/docs/sin.md
index a87b4e4f452af..1e1fc5ee9c938 100644
--- a/docs/reference/esql/functions/kibana/docs/sin.md
+++ b/docs/reference/esql/functions/kibana/docs/sin.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->

 ### SIN
-Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.
+Returns the Sine trigonometric function of an angle.

 ```
 ROW a=1.8
diff --git a/docs/reference/esql/functions/kibana/docs/sinh.md b/docs/reference/esql/functions/kibana/docs/sinh.md
index 81e8d9fd473d5..886b3b95b09f8 100644
--- a/docs/reference/esql/functions/kibana/docs/sinh.md
+++ b/docs/reference/esql/functions/kibana/docs/sinh.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->

 ### SINH
-Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle.
+Returns the hyperbolic sine of an angle.

 ```
 ROW a=1.8
diff --git a/docs/reference/esql/functions/kibana/docs/tan.md b/docs/reference/esql/functions/kibana/docs/tan.md
index edfb4210f7dd2..f1594f4de7476 100644
--- a/docs/reference/esql/functions/kibana/docs/tan.md
+++ b/docs/reference/esql/functions/kibana/docs/tan.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->

 ### TAN
-Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle.
+Returns the Tangent trigonometric function of an angle.

 ```
 ROW a=1.8
diff --git a/docs/reference/esql/functions/kibana/docs/tanh.md b/docs/reference/esql/functions/kibana/docs/tanh.md
index d3d8c7d4e9196..c4a70dec00ba8 100644
--- a/docs/reference/esql/functions/kibana/docs/tanh.md
+++ b/docs/reference/esql/functions/kibana/docs/tanh.md
@@ -3,7 +3,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ
 -->

 ### TANH
-Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle.
``` ROW a=1.8 diff --git a/docs/reference/esql/functions/types/div.asciidoc b/docs/reference/esql/functions/types/div.asciidoc index 79749dda1bc55..61d6381dda194 100644 --- a/docs/reference/esql/functions/types/div.asciidoc +++ b/docs/reference/esql/functions/types/div.asciidoc @@ -6,6 +6,13 @@ |=== lhs | rhs | result double | double | double +double | integer | double +double | long | double +integer | double | double integer | integer | integer +integer | long | long +long | double | double +long | integer | long long | long | long +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index f4da3fd215595..497c9319fedb3 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -5,5 +5,26 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +boolean | boolean | boolean +cartesian_point | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +geo_point | geo_point | boolean +geo_shape | geo_shape | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index f4da3fd215595..771daf1a953b2 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -5,5 +5,21 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index f4da3fd215595..771daf1a953b2 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -5,5 +5,21 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index f4da3fd215595..771daf1a953b2 100644 --- 
a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -5,5 +5,21 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index f4da3fd215595..771daf1a953b2 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -5,5 +5,21 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== diff --git a/docs/reference/esql/functions/types/mod.asciidoc b/docs/reference/esql/functions/types/mod.asciidoc index 79749dda1bc55..61d6381dda194 100644 --- a/docs/reference/esql/functions/types/mod.asciidoc +++ b/docs/reference/esql/functions/types/mod.asciidoc @@ -6,6 +6,13 @@ |=== lhs | rhs | result double | double | double +double | integer | double +double | long | double +integer | double | double integer | integer | integer +integer | long | long +long | double | double +long | integer | long long | long | long +unsigned_long | unsigned_long | unsigned_long |=== diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index f4da3fd215595..497c9319fedb3 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -5,5 +5,26 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result +boolean | boolean | boolean +cartesian_point | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +datetime | datetime | boolean +double | double | boolean +double | integer | boolean +double | long | boolean +geo_point | geo_point | boolean +geo_shape | geo_shape | boolean +integer | double | boolean integer | integer | boolean +integer | long | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | double | boolean +long | integer | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +unsigned_long | unsigned_long | boolean +version | version | boolean |=== From d41a518201a0edb43f641a00842bee6f5624939f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 18 Apr 2024 14:16:54 -0400 Subject: [PATCH 110/130] ESQL: Generate single-type BlockHash (#107634) This uses our stringtemplate plugin to generate our `BlockHash` implementations that target a single type. They are very similar and it's quite simple to template. 
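For reference, a minimal sketch of how such a template branches per type. The
$Type$ and $Hash$ placeholders stand for the per-type properties wired up through
the prop() helper in build.gradle below (LongHash for int, long and double;
BytesRefHash for BytesRef); this fragment is illustrative, not the full template:

    final class $Type$BlockHash extends BlockHash {
        private final int channel;
        private final $Hash$ hash;

        // Dedupe multivalued positions and map each value to a group ordinal.
        private IntBlock add($Type$Block block) {
            MultivalueDedupe.HashResult result = new MultivalueDedupe$Type$(block).hashAdd(blockFactory, hash);
            seenNull |= result.sawNull();
            return result.ords();
        }
    }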
--- x-pack/plugin/esql/compute/build.gradle | 34 ++- .../blockhash/BytesRefBlockHash.java | 68 +++--- .../blockhash/DoubleBlockHash.java | 68 +++--- .../aggregation/blockhash/IntBlockHash.java | 52 ++-- .../aggregation/blockhash/LongBlockHash.java | 56 ++--- .../aggregation/blockhash/BlockHash.java | 2 +- .../aggregation/blockhash/X-BlockHash.java.st | 224 ++++++++++++++++++ 7 files changed, 371 insertions(+), 133 deletions(-) rename x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java (72%) rename x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java (66%) rename x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java (74%) rename x-pack/plugin/esql/compute/src/main/{java => generated-src}/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java (74%) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index d04daf6631447..17849d9d97b6d 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -30,13 +30,14 @@ spotless { } } -def prop(Type, type, TYPE, BYTES, Array) { +def prop(Type, type, TYPE, BYTES, Array, Hash) { return [ "Type" : Type, "type" : type, "TYPE" : TYPE, "BYTES" : BYTES, "Array" : Array, + "Hash" : Hash, "int" : type == "int" ? "true" : "", "long" : type == "long" ? "true" : "", @@ -47,11 +48,11 @@ def prop(Type, type, TYPE, BYTES, Array) { } tasks.named('stringTemplates').configure { - var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray") - var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray") - var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray") - var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "") - var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray") + var intProperties = prop("Int", "int", "INT", "Integer.BYTES", "IntArray", "LongHash") + var longProperties = prop("Long", "long", "LONG", "Long.BYTES", "LongArray", "LongHash") + var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES", "DoubleArray", "LongHash") + var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF", "", "BytesRefHash") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES", "BitArray", "") // primitive vectors File vectorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st") template { @@ -425,6 +426,27 @@ tasks.named('stringTemplates').configure { it.inputFile = multivalueDedupeInputFile it.outputFile = "org/elasticsearch/compute/operator/MultivalueDedupeBytesRef.java" } + File blockHashInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st") + template { + it.properties = intProperties + it.inputFile = blockHashInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java" + } + template { + it.properties = longProperties + it.inputFile = blockHashInputFile + it.outputFile = 
"org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java" + } + template { + it.properties = doubleProperties + it.inputFile = blockHashInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = blockHashInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java" + } File keyExtractorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/topn/X-KeyExtractor.java.st") template { it.properties = bytesRefProperties diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java similarity index 72% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 8ab1e0aaf23dd..28d1637c6c8b8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.util.BytesRefHash; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -33,9 +32,8 @@ * Maps a {@link BytesRefBlock} column to group ids. */ final class BytesRefBlockHash extends BlockHash { - private final BytesRef bytes = new BytesRef(); private final int channel; - private final BytesRefHash bytesRefHash; + private final BytesRefHash hash; /** * Have we seen any {@code null} values? 
@@ -49,44 +47,46 @@ final class BytesRefBlockHash extends BlockHash { BytesRefBlockHash(int channel, BlockFactory blockFactory) { super(blockFactory); this.channel = channel; - this.bytesRefHash = new BytesRefHash(1, blockFactory.bigArrays()); + this.hash = new BytesRefHash(1, blockFactory.bigArrays()); } @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { - Block block = page.getBlock(channel); + var block = page.getBlock(channel); if (block.areAllValuesNull()) { seenNull = true; try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { addInput.add(0, groupIds); } - } else { - BytesRefBlock bytesBlock = (BytesRefBlock) block; - BytesRefVector bytesVector = bytesBlock.asVector(); - if (bytesVector == null) { - try (IntBlock groupIds = add(bytesBlock)) { - addInput.add(0, groupIds); - } - } else { - try (IntVector groupIds = add(bytesVector)) { - addInput.add(0, groupIds); - } + return; + } + BytesRefBlock castBlock = (BytesRefBlock) block; + BytesRefVector vector = castBlock.asVector(); + if (vector == null) { + try (IntBlock groupIds = add(castBlock)) { + addInput.add(0, groupIds); } + return; + } + try (IntVector groupIds = add(vector)) { + addInput.add(0, groupIds); } } private IntVector add(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(bytesRefHash.add(vector.getBytesRef(i, bytes))))); + BytesRef v = vector.getBytesRef(i, scratch); + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(hash.add(v)))); } return builder.build(); } } private IntBlock add(BytesRefBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hashAdd(blockFactory, bytesRefHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeBytesRef(block).hashAdd(blockFactory, hash); seenNull |= result.sawNull(); return result.ords(); } @@ -98,21 +98,20 @@ public BytesRefBlock[] getKeys() { * without and still read from the block. */ // TODO replace with takeBytesRefsOwnership ?! - if (seenNull) { - try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(bytesRefHash.size() + 1))) { + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { builder.appendNull(); BytesRef spare = new BytesRef(); - for (long i = 0; i < bytesRefHash.size(); i++) { - builder.appendBytesRef(bytesRefHash.get(i, spare)); + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); } return new BytesRefBlock[] { builder.build() }; } } - final int size = Math.toIntExact(bytesRefHash.size()); + final int size = Math.toIntExact(hash.size()); try (BytesStreamOutput out = new BytesStreamOutput()) { - bytesRefHash.getBytesRefs().writeTo(out); + hash.getBytesRefs().writeTo(out); try (StreamInput in = out.bytes().streamInput()) { return new BytesRefBlock[] { blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; @@ -124,29 +123,26 @@ public BytesRefBlock[] getKeys() { @Override public IntVector nonEmpty() { - return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(bytesRefHash.size() + 1), blockFactory); + return IntVector.range(seenNull ? 
0 : 1, Math.toIntExact(hash.size() + 1), blockFactory); } @Override public BitArray seenGroupIds(BigArrays bigArrays) { - return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(bytesRefHash.size() + 1)).seenGroupIds(bigArrays); + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1)).seenGroupIds(bigArrays); } @Override public void close() { - bytesRefHash.close(); + hash.close(); } @Override public String toString() { - return "BytesRefBlockHash{channel=" - + channel - + ", entries=" - + bytesRefHash.size() - + ", size=" - + ByteSizeValue.ofBytes(bytesRefHash.ramBytesUsed()) - + ", seenNull=" - + seenNull - + '}'; + StringBuilder b = new StringBuilder(); + b.append("BytesRefBlockHash{channel=").append(channel); + b.append(", entries=").append(hash.size()); + b.append(", size=").append(ByteSizeValue.ofBytes(hash.ramBytesUsed())); + b.append(", seenNull=").append(seenNull); + return b.append('}').toString(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java similarity index 66% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index b6ce0ebbe81de..fed6e21e63969 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -29,7 +29,7 @@ */ final class DoubleBlockHash extends BlockHash { private final int channel; - private final LongHash longHash; + private final LongHash hash; /** * Have we seen any {@code null} values? 
@@ -43,7 +43,7 @@ final class DoubleBlockHash extends BlockHash { DoubleBlockHash(int channel, BlockFactory blockFactory) { super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, blockFactory.bigArrays()); + this.hash = new LongHash(1, blockFactory.bigArrays()); } @Override @@ -54,18 +54,18 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { addInput.add(0, groupIds); } - } else { - DoubleBlock doubleBlock = (DoubleBlock) block; - DoubleVector doubleVector = doubleBlock.asVector(); - if (doubleVector == null) { - try (IntBlock groupIds = add(doubleBlock)) { - addInput.add(0, groupIds); - } - } else { - try (IntVector groupIds = add(doubleVector)) { - addInput.add(0, groupIds); - } + return; + } + DoubleBlock castBlock = (DoubleBlock) block; + DoubleVector vector = castBlock.asVector(); + if (vector == null) { + try (IntBlock groupIds = add(castBlock)) { + addInput.add(0, groupIds); } + return; + } + try (IntVector groupIds = add(vector)) { + addInput.add(0, groupIds); } } @@ -73,73 +73,61 @@ private IntVector add(DoubleVector vector) { int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i)))))); + long v = Double.doubleToLongBits(vector.getDouble(i)); + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(hash.add(v)))); } return builder.build(); } } private IntBlock add(DoubleBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hashAdd(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeDouble(block).hashAdd(blockFactory, hash); seenNull |= result.sawNull(); return result.ords(); } - private IntVector lookup(DoubleVector vector) { - int positions = vector.getPositionCount(); - try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { - for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i)))))); - } - return builder.build(); - } - } - - private IntBlock lookup(DoubleBlock block) { - return new MultivalueDedupeDouble(block).hashLookup(blockFactory, longHash); - } - @Override public DoubleBlock[] getKeys() { if (seenNull) { - final int size = Math.toIntExact(longHash.size() + 1); + final int size = Math.toIntExact(hash.size() + 1); final double[] keys = new double[size]; for (int i = 1; i < size; i++) { - keys[i] = Double.longBitsToDouble(longHash.get(i - 1)); + keys[i] = Double.longBitsToDouble(hash.get(i - 1)); } BitSet nulls = new BitSet(1); nulls.set(0); return new DoubleBlock[] { blockFactory.newDoubleArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } - - final int size = Math.toIntExact(longHash.size()); + final int size = Math.toIntExact(hash.size()); final double[] keys = new double[size]; for (int i = 0; i < size; i++) { - keys[i] = Double.longBitsToDouble(longHash.get(i)); + keys[i] = Double.longBitsToDouble(hash.get(i)); } - - // TODO claim the array and wrap? return new DoubleBlock[] { blockFactory.newDoubleArrayVector(keys, keys.length).asBlock() }; } @Override public IntVector nonEmpty() { - return IntVector.range(seenNull ? 
0 : 1, Math.toIntExact(longHash.size() + 1), blockFactory); + return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1), blockFactory); } @Override public BitArray seenGroupIds(BigArrays bigArrays) { - return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(longHash.size() + 1)).seenGroupIds(bigArrays); + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1)).seenGroupIds(bigArrays); } @Override public void close() { - longHash.close(); + hash.close(); } @Override public String toString() { - return "DoubleBlockHash{channel=" + channel + ", entries=" + longHash.size() + ", seenNull=" + seenNull + '}'; + StringBuilder b = new StringBuilder(); + b.append("DoubleBlockHash{channel=").append(channel); + b.append(", entries=").append(hash.size()); + b.append(", seenNull=").append(seenNull); + return b.append('}').toString(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java similarity index 74% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 935e9056e4b1a..e40a7138c3610 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -27,7 +27,8 @@ */ final class IntBlockHash extends BlockHash { private final int channel; - private final LongHash longHash; + private final LongHash hash; + /** * Have we seen any {@code null} values? *
<p>
    @@ -40,7 +41,7 @@ final class IntBlockHash extends BlockHash { IntBlockHash(int channel, BlockFactory blockFactory) { super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, blockFactory.bigArrays()); + this.hash = new LongHash(1, blockFactory.bigArrays()); } @Override @@ -51,18 +52,18 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { addInput.add(0, groupIds); } - } else { - IntBlock intBlock = (IntBlock) block; - IntVector intVector = intBlock.asVector(); - if (intVector == null) { - try (IntBlock groupIds = add(intBlock)) { - addInput.add(0, groupIds); - } - } else { - try (IntVector groupIds = add(intVector)) { - addInput.add(0, groupIds); - } + return; + } + IntBlock castBlock = (IntBlock) block; + IntVector vector = castBlock.asVector(); + if (vector == null) { + try (IntBlock groupIds = add(castBlock)) { + addInput.add(0, groupIds); } + return; + } + try (IntVector groupIds = add(vector)) { + addInput.add(0, groupIds); } } @@ -70,14 +71,15 @@ private IntVector add(IntVector vector) { int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getInt(i))))); + int v = vector.getInt(i); + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(hash.add(v)))); } return builder.build(); } } private IntBlock add(IntBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hashAdd(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeInt(block).hashAdd(blockFactory, hash); seenNull |= result.sawNull(); return result.ords(); } @@ -85,41 +87,45 @@ private IntBlock add(IntBlock block) { @Override public IntBlock[] getKeys() { if (seenNull) { - final int size = Math.toIntExact(longHash.size() + 1); + final int size = Math.toIntExact(hash.size() + 1); final int[] keys = new int[size]; for (int i = 1; i < size; i++) { - keys[i] = (int) longHash.get(i - 1); + keys[i] = (int) hash.get(i - 1); } BitSet nulls = new BitSet(1); nulls.set(0); return new IntBlock[] { blockFactory.newIntArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } - final int size = Math.toIntExact(longHash.size()); + final int size = Math.toIntExact(hash.size()); final int[] keys = new int[size]; for (int i = 0; i < size; i++) { - keys[i] = (int) longHash.get(i); + keys[i] = (int) hash.get(i); } return new IntBlock[] { blockFactory.newIntArrayVector(keys, keys.length).asBlock() }; } @Override public IntVector nonEmpty() { - return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(longHash.size() + 1), blockFactory); + return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1), blockFactory); } @Override public BitArray seenGroupIds(BigArrays bigArrays) { - return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(longHash.size() + 1)).seenGroupIds(bigArrays); + return new SeenGroupIds.Range(seenNull ? 
0 : 1, Math.toIntExact(hash.size() + 1)).seenGroupIds(bigArrays); } @Override public void close() { - longHash.close(); + hash.close(); } @Override public String toString() { - return "IntBlockHash{channel=" + channel + ", entries=" + longHash.size() + ", seenNull=" + seenNull + '}'; + StringBuilder b = new StringBuilder(); + b.append("IntBlockHash{channel=").append(channel); + b.append(", entries=").append(hash.size()); + b.append(", seenNull=").append(seenNull); + return b.append('}').toString(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java similarity index 74% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 440b5de829a92..a41617d0c6704 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -25,11 +25,11 @@ import java.util.BitSet; /** - * Maps {@link LongBlock} to group ids. + * Maps a {@link LongBlock} column to group ids. */ final class LongBlockHash extends BlockHash { private final int channel; - private final LongHash longHash; + private final LongHash hash; /** * Have we seen any {@code null} values? @@ -43,7 +43,7 @@ final class LongBlockHash extends BlockHash { LongBlockHash(int channel, BlockFactory blockFactory) { super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, blockFactory.bigArrays()); + this.hash = new LongHash(1, blockFactory.bigArrays()); } @Override @@ -54,18 +54,18 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { addInput.add(0, groupIds); } - } else { - LongBlock longBlock = page.getBlock(channel); - LongVector longVector = longBlock.asVector(); - if (longVector == null) { - try (IntBlock groupIds = add(longBlock)) { - addInput.add(0, groupIds); - } - } else { - try (IntVector groupIds = add(longVector)) { - addInput.add(0, groupIds); - } + return; + } + LongBlock castBlock = (LongBlock) block; + LongVector vector = castBlock.asVector(); + if (vector == null) { + try (IntBlock groupIds = add(castBlock)) { + addInput.add(0, groupIds); } + return; + } + try (IntVector groupIds = add(vector)) { + addInput.add(0, groupIds); } } @@ -73,14 +73,15 @@ private IntVector add(LongVector vector) { int positions = vector.getPositionCount(); try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { - builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getLong(i))))); + long v = vector.getLong(i); + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(hash.add(v)))); } return builder.build(); } } private IntBlock add(LongBlock block) { - MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hashAdd(blockFactory, longHash); + MultivalueDedupe.HashResult result = new MultivalueDedupeLong(block).hashAdd(blockFactory, hash); seenNull |= result.sawNull(); return result.ords(); } @@ -88,44 +89,45 @@ private IntBlock add(LongBlock block) { @Override public 
LongBlock[] getKeys() { if (seenNull) { - final int size = Math.toIntExact(longHash.size() + 1); + final int size = Math.toIntExact(hash.size() + 1); final long[] keys = new long[size]; for (int i = 1; i < size; i++) { - keys[i] = longHash.get(i - 1); + keys[i] = hash.get(i - 1); } BitSet nulls = new BitSet(1); nulls.set(0); return new LongBlock[] { blockFactory.newLongArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; } - - final int size = Math.toIntExact(longHash.size()); + final int size = Math.toIntExact(hash.size()); final long[] keys = new long[size]; for (int i = 0; i < size; i++) { - keys[i] = longHash.get(i); + keys[i] = hash.get(i); } - - // TODO call something like takeKeyOwnership to claim the keys array directly return new LongBlock[] { blockFactory.newLongArrayVector(keys, keys.length).asBlock() }; } @Override public IntVector nonEmpty() { - return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(longHash.size() + 1), blockFactory); + return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1), blockFactory); } @Override public BitArray seenGroupIds(BigArrays bigArrays) { - return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(longHash.size() + 1)).seenGroupIds(bigArrays); + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1)).seenGroupIds(bigArrays); } @Override public void close() { - longHash.close(); + hash.close(); } @Override public String toString() { - return "LongBlockHash{channel=" + channel + ", entries=" + longHash.size() + ", seenNull=" + seenNull + '}'; + StringBuilder b = new StringBuilder(); + b.append("LongBlockHash{channel=").append(channel); + b.append(", entries=").append(hash.size()); + b.append(", seenNull=").append(seenNull); + return b.append('}').toString(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index b43e2ac767b0b..d801da8b28f5c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -32,7 +32,7 @@ * @see BytesRefHash */ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // - permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash,// + permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash, // NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash, TimeSeriesBlockHash { protected final BlockFactory blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st new file mode 100644 index 0000000000000..2c8d9eca1e8d0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st @@ -0,0 +1,224 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +$endif$ +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +$if(BytesRef)$ +import org.elasticsearch.common.util.BytesRefArray; +$endif$ +import org.elasticsearch.common.util.$Hash$; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.SeenGroupIds; +$if(BytesRef)$ +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +$elseif(double)$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +$elseif(int)$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +$elseif(long)$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.$Type$Vector; +$endif$ +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.MultivalueDedupe; +import org.elasticsearch.compute.operator.MultivalueDedupe$Type$; + +$if(BytesRef)$ +import java.io.IOException; + +$else$ +import java.util.BitSet; + +$endif$ +/** + * Maps a {@link $Type$Block} column to group ids. + */ +final class $Type$BlockHash extends BlockHash { + private final int channel; + private final $Hash$ hash; + + /** + * Have we seen any {@code null} values? + *
<p>
    + * We reserve the 0 ordinal for the {@code null} key so methods like + * {@link #nonEmpty} need to skip 0 if we haven't seen any null values. + *
</p>
    + */ + private boolean seenNull; + + $Type$BlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); + this.channel = channel; + this.hash = new $Hash$(1, blockFactory.bigArrays()); + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + var block = page.getBlock(channel); + if (block.areAllValuesNull()) { + seenNull = true; + try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) { + addInput.add(0, groupIds); + } + return; + } + $Type$Block castBlock = ($Type$Block) block; + $Type$Vector vector = castBlock.asVector(); + if (vector == null) { + try (IntBlock groupIds = add(castBlock)) { + addInput.add(0, groupIds); + } + return; + } + try (IntVector groupIds = add(vector)) { + addInput.add(0, groupIds); + } + } + + private IntVector add($Type$Vector vector) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int positions = vector.getPositionCount(); + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { +$if(double)$ + long v = Double.doubleToLongBits(vector.getDouble(i)); +$elseif(BytesRef)$ + BytesRef v = vector.getBytesRef(i, scratch); +$else$ + $type$ v = vector.get$Type$(i); +$endif$ + builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(hash.add(v)))); + } + return builder.build(); + } + } + + private IntBlock add($Type$Block block) { + MultivalueDedupe.HashResult result = new MultivalueDedupe$Type$(block).hashAdd(blockFactory, hash); + seenNull |= result.sawNull(); + return result.ords(); + } + + @Override + public $Type$Block[] getKeys() { +$if(BytesRef)$ + /* + * Create an un-owned copy of the data so we can close our BytesRefHash + * without and still read from the block. + */ + // TODO replace with takeBytesRefsOwnership ?! 
+ if (seenNull) { + try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) { + builder.appendNull(); + BytesRef spare = new BytesRef(); + for (long i = 0; i < hash.size(); i++) { + builder.appendBytesRef(hash.get(i, spare)); + } + return new BytesRefBlock[] { builder.build() }; + } + } + + final int size = Math.toIntExact(hash.size()); + try (BytesStreamOutput out = new BytesStreamOutput()) { + hash.getBytesRefs().writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new BytesRefBlock[] { + blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() }; + } + } catch (IOException e) { + throw new IllegalStateException(e); + } +$else$ + if (seenNull) { + final int size = Math.toIntExact(hash.size() + 1); + final $type$[] keys = new $type$[size]; + for (int i = 1; i < size; i++) { +$if(int)$ + keys[i] = (int) hash.get(i - 1); +$elseif(double)$ + keys[i] = Double.longBitsToDouble(hash.get(i - 1)); +$elseif(long)$ + keys[i] = hash.get(i - 1); +$else$ + syntax error because we didn't cover this case +$endif$ + } + BitSet nulls = new BitSet(1); + nulls.set(0); + return new $Type$Block[] { + blockFactory.new$Type$ArrayBlock(keys, keys.length, null, nulls, Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING) }; + } + final int size = Math.toIntExact(hash.size()); + final $type$[] keys = new $type$[size]; + for (int i = 0; i < size; i++) { +$if(int)$ + keys[i] = (int) hash.get(i); +$elseif(double)$ + keys[i] = Double.longBitsToDouble(hash.get(i)); +$elseif(long)$ + keys[i] = hash.get(i); +$else$ + syntax error because we didn't cover this case +$endif$ + } + return new $Type$Block[] { blockFactory.new$Type$ArrayVector(keys, keys.length).asBlock() }; +$endif$ + } + + @Override + public IntVector nonEmpty() { + return IntVector.range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1), blockFactory); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + return new SeenGroupIds.Range(seenNull ? 0 : 1, Math.toIntExact(hash.size() + 1)).seenGroupIds(bigArrays); + } + + @Override + public void close() { + hash.close(); + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + b.append("$Type$BlockHash{channel=").append(channel); + b.append(", entries=").append(hash.size()); +$if(BytesRef)$ + b.append(", size=").append(ByteSizeValue.ofBytes(hash.ramBytesUsed())); +$endif$ + b.append(", seenNull=").append(seenNull); + return b.append('}').toString(); + } +} From 346fa8b8b56d47dc9bc44ebff8c576d3585fb07f Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Thu, 18 Apr 2024 14:32:44 -0400 Subject: [PATCH 111/130] Remove x-pack references from SQL docs (#107584) --- docs/reference/sql/getting-started.asciidoc | 2 +- docs/reference/sql/index.asciidoc | 4 +--- docs/reference/sql/overview.asciidoc | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/docs/reference/sql/getting-started.asciidoc b/docs/reference/sql/getting-started.asciidoc index 3e9b52074d8c0..43fb661580aa6 100644 --- a/docs/reference/sql/getting-started.asciidoc +++ b/docs/reference/sql/getting-started.asciidoc @@ -40,7 +40,7 @@ Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z // TESTRESPONSE[non_json] You can also use the <>. 
There is a script to start it -shipped in x-pack's bin directory: +shipped in the Elasticsearch `bin` directory: [source,bash] -------------------------------------------------- diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index 2ae7ea78a803f..797da3c45e0c0 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[xpack-sql]] = SQL @@ -11,8 +10,7 @@ [partintro] -- -X-Pack includes a SQL feature to execute SQL queries against {es} -indices and return results in tabular format. +{es} includes a SQL feature to execute SQL queries against indices and return results in tabular format. The following chapters aim to cover everything from usage, to syntax and drivers. Experienced users or those in a hurry might want to jump directly to diff --git a/docs/reference/sql/overview.asciidoc b/docs/reference/sql/overview.asciidoc index 05160b481ff74..348161bdcb974 100644 --- a/docs/reference/sql/overview.asciidoc +++ b/docs/reference/sql/overview.asciidoc @@ -1,4 +1,3 @@ -[role="xpack"] [[sql-overview]] == Overview @@ -8,7 +7,7 @@ [discrete] === Introduction -{es-sql} is an X-Pack component that allows SQL-like queries to be executed in real-time against {es}. +{es-sql} is a feature that allows SQL-like queries to be executed in real-time against {es}. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data _natively_ inside {es}. One can think of {es-sql} as a _translator_, one that understands both SQL and {es} and makes it easy to read and process data in real-time, at scale by leveraging {es} capabilities. From b1df1a855937351883742c44d8805a5a6ba35fa7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 18 Apr 2024 15:20:14 -0400 Subject: [PATCH 112/130] ESQL: Fix a test bug (#107642) The `RLIKE` test didn't think `#` was a special character. It is. 
closes #107588 --- .../esql/expression/function/scalar/string/RLikeTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index da8af4e57636c..6c2e6c725cd14 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -39,7 +39,7 @@ public RLikeTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { return parameters(str -> { - for (String syntax : new String[] { "\\", ".", "?", "+", "*", "|", "{", "}", "[", "]", "(", ")", "\"", "<", ">" }) { + for (String syntax : new String[] { "\\", ".", "?", "+", "*", "|", "{", "}", "[", "]", "(", ")", "\"", "<", ">", "#" }) { str = str.replace(syntax, "\\" + syntax); } return str; From bcd62e8d0338867cecca59232c5af5bf242fd191 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 18 Apr 2024 15:19:24 -0500 Subject: [PATCH 113/130] Adding hits_time_in_millis and misses_time_in_millis to enrich cache stats (#107579) --- docs/changelog/107579.yaml | 6 ++++ .../ingest/apis/enrich/enrich-stats.asciidoc | 16 ++++++++-- .../org/elasticsearch/TransportVersions.java | 1 + .../core/enrich/action/EnrichStatsAction.java | 31 ++++++++++++++++--- .../xpack/enrich/EnrichCache.java | 26 ++++++++++++++-- .../xpack/enrich/EnrichCacheTests.java | 23 ++++++++++++-- .../action/EnrichStatsResponseTests.java | 10 +++++- .../enrich/EnrichStatsCollectorTests.java | 2 ++ 8 files changed, 103 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/107579.yaml diff --git a/docs/changelog/107579.yaml b/docs/changelog/107579.yaml new file mode 100644 index 0000000000000..fdee59424b8de --- /dev/null +++ b/docs/changelog/107579.yaml @@ -0,0 +1,6 @@ +pr: 107579 +summary: Adding `hits_time_in_millis` and `misses_time_in_millis` to enrich cache + stats +area: Ingest Node +type: enhancement +issues: [] diff --git a/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc b/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc index 11dd17aac1a8f..ad1ca62e37bbf 100644 --- a/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc +++ b/docs/reference/ingest/apis/enrich/enrich-stats.asciidoc @@ -105,7 +105,7 @@ Number of cached entries. (Integer) The number of enrich lookups served from cache. -`missed`:: +`misses`:: (Integer) The number of time enrich lookups couldn't be served from cache. @@ -113,6 +113,14 @@ served from cache. `evictions`:: (Integer) The number cache entries evicted from the cache. + +`hits_time_in_millis`:: +(Long) +The amount of time in milliseconds spent fetching data from the cache on successful cache hits only. + +`misses_time_in_millis`:: +(Long) +The amount of time in milliseconds spent fetching data from the enrich index and updating the cache, on cache misses only. 
-- [[enrich-stats-api-example]] @@ -162,7 +170,9 @@ The API returns the following response: "count": 0, "hits": 0, "misses": 0, - "evictions": 0 + "evictions": 0, + "hits_time_in_millis": 0, + "misses_time_in_millis": 0 } ] } @@ -175,3 +185,5 @@ The API returns the following response: // TESTRESPONSE[s/"count": 0/"count" : $body.cache_stats.0.count/] // TESTRESPONSE[s/"misses": 0/"misses" : $body.cache_stats.0.misses/] // TESTRESPONSE[s/"evictions": 0/"evictions" : $body.cache_stats.0.evictions/] +// TESTRESPONSE[s/"hits_time_in_millis": 0/"hits_time_in_millis" : $body.cache_stats.0.hits_time_in_millis/] +// TESTRESPONSE[s/"misses_time_in_millis": 0/"misses_time_in_millis" : $body.cache_stats.0.misses_time_in_millis/] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a66d96587c5e5..cfbe5e1ac1cbd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -176,6 +176,7 @@ static TransportVersion def(int id) { public static final TransportVersion ILM_SHRINK_ENABLE_WRITE = def(8_635_00_0); public static final TransportVersion GEOIP_CACHE_STATS = def(8_636_00_0); public static final TransportVersion WATERMARK_THRESHOLDS_STATS = def(8_637_00_0); + public static final TransportVersion ENRICH_CACHE_ADDITIONAL_STATS = def(8_638_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java index be8513a8269a3..41cd35557c18d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.enrich.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -13,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; @@ -184,13 +186,26 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public record CacheStats(String nodeId, long count, long hits, long misses, long evictions) - implements - Writeable, - ToXContentFragment { + public record CacheStats( + String nodeId, + long count, + long hits, + long misses, + long evictions, + long hitsTimeInMillis, + long missesTimeInMillis + ) implements Writeable, ToXContentFragment { public CacheStats(StreamInput in) throws IOException { - this(in.readString(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong()); + this( + in.readString(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readVLong() : -1, + in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? 
in.readVLong() : -1 + ); } @Override @@ -200,6 +215,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("hits", hits); builder.field("misses", misses); builder.field("evictions", evictions); + builder.humanReadableField("hits_time_in_millis", "hits_time", new TimeValue(hitsTimeInMillis)); + builder.humanReadableField("misses_time_in_millis", "misses_time", new TimeValue(missesTimeInMillis)); return builder; } @@ -210,6 +227,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(hits); out.writeVLong(misses); out.writeVLong(evictions); + if (out.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS)) { + out.writeVLong(hitsTimeInMillis); + out.writeVLong(missesTimeInMillis); + } } } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index 722328b6b76d6..e36707b0b8bc4 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -25,7 +26,9 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; +import java.util.function.LongSupplier; /** * A simple cache for enrich that uses {@link Cache}. There is one instance of this cache and @@ -48,9 +51,18 @@ public final class EnrichCache { private final Cache>> cache; + private final LongSupplier relativeNanoTimeProvider; + private final AtomicLong hitsTimeInNanos = new AtomicLong(0); + private final AtomicLong missesTimeInNanos = new AtomicLong(0); private volatile Metadata metadata; EnrichCache(long maxSize) { + this(maxSize, System::nanoTime); + } + + // non-private for unit testing only + EnrichCache(long maxSize, LongSupplier relativeNanoTimeProvider) { + this.relativeNanoTimeProvider = relativeNanoTimeProvider; this.cache = CacheBuilder.>>builder().setMaximumWeight(maxSize).build(); } @@ -67,14 +79,22 @@ public void computeIfAbsent( ActionListener>> listener ) { // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
+ long cacheStart = relativeNanoTimeProvider.getAsLong(); List> response = get(searchRequest); + long cacheRequestTime = relativeNanoTimeProvider.getAsLong() - cacheStart; if (response != null) { + hitsTimeInNanos.addAndGet(cacheRequestTime); listener.onResponse(response); } else { + + final long retrieveStart = relativeNanoTimeProvider.getAsLong(); searchResponseFetcher.accept(searchRequest, ActionListener.wrap(resp -> { List> value = toCacheValue(resp); put(searchRequest, value); - listener.onResponse(deepCopy(value, false)); + List> copy = deepCopy(value, false); + long databaseQueryAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; + missesTimeInNanos.addAndGet(cacheRequestTime + databaseQueryAndCachePutTime); + listener.onResponse(copy); }, listener::onFailure)); } } @@ -111,7 +131,9 @@ public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { cache.count(), cacheStats.getHits(), cacheStats.getMisses(), - cacheStats.getEvictions() + cacheStats.getEvictions(), + TimeValue.nsecToMSec(hitsTimeInNanos.get()), + TimeValue.nsecToMSec(missesTimeInNanos.get()) ); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java index 2ecb40b306a63..f2f2948db41ee 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.MatchQueryBuilder; @@ -23,6 +24,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; import java.io.IOException; import java.util.ArrayList; @@ -31,6 +33,7 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -148,7 +151,7 @@ public void testCaching() { assertThat(cacheStats.evictions(), equalTo(4L)); } - public void testPutIfAbsent() throws InterruptedException { + public void testComputeIfAbsent() throws InterruptedException { // Emulate cluster metadata: // (two enrich indices with corresponding alias entries) var metadata = Metadata.builder() @@ -177,7 +180,9 @@ public void testPutIfAbsent() throws InterruptedException { Map.of("key1", "value1", "key2", "value2"), Map.of("key3", "value3", "key4", "value4") ); - EnrichCache enrichCache = new EnrichCache(3); + final AtomicLong testNanoTime = new AtomicLong(0); + // We use a relative time provider that increments 1ms every time it is called. 
So each operation appears to take 1ms + EnrichCache enrichCache = new EnrichCache(3, () -> testNanoTime.addAndGet(TimeValue.timeValueMillis(1).getNanos())); enrichCache.setMetadata(metadata); { @@ -202,6 +207,13 @@ public void onFailure(Exception e) { }); assertThat(queriedDatabaseLatch.await(5, TimeUnit.SECONDS), equalTo(true)); assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true)); + EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10)); + assertThat(cacheStats.count(), equalTo(1L)); + assertThat(cacheStats.hits(), equalTo(0L)); + assertThat(cacheStats.misses(), equalTo(1L)); + assertThat(cacheStats.evictions(), equalTo(0L)); + assertThat(cacheStats.hitsTimeInMillis(), equalTo(0L)); + assertThat(cacheStats.missesTimeInMillis(), equalTo(2L)); // cache query and enrich query + cache put } { @@ -220,6 +232,13 @@ public void onFailure(Exception e) { } }); assertThat(notifiedOfResultLatch.await(5, TimeUnit.SECONDS), equalTo(true)); + EnrichStatsAction.Response.CacheStats cacheStats = enrichCache.getStats(randomAlphaOfLength(10)); + assertThat(cacheStats.count(), equalTo(1L)); + assertThat(cacheStats.hits(), equalTo(1L)); + assertThat(cacheStats.misses(), equalTo(1L)); + assertThat(cacheStats.evictions(), equalTo(0L)); + assertThat(cacheStats.hitsTimeInMillis(), equalTo(1L)); + assertThat(cacheStats.missesTimeInMillis(), equalTo(2L)); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java index 87c4348ccd522..14e3008cda02f 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichStatsResponseTests.java @@ -44,7 +44,15 @@ protected EnrichStatsAction.Response createTestInstance() { ); coordinatorStats.add(stats); cacheStats.add( - new CacheStats(nodeId, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()) + new CacheStats( + nodeId, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong() + ) ); } return new EnrichStatsAction.Response(executingPolicies, coordinatorStats, cacheStats); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java index aaac676fa6c21..a38b2605c1ff0 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/monitoring/collector/enrich/EnrichStatsCollectorTests.java @@ -91,6 +91,8 @@ public void testDoCollect() throws Exception { randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong() ) ); From 0a856cc658963740d2b975fe0797d020cbad255e Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 18 Apr 2024 21:36:32 +0100 Subject: [PATCH 114/130] Expose Executor parameter on SharedBlobCacheService (#107626) Rather than specifying the `Executor` to use for fetch operations in the cache service constructor, this commit permits callers to 
specify the executor to use on individual fetch operations. --- .../shared/SharedBlobCacheService.java | 64 ++++++++++++++++++- .../shared/SharedBlobCacheServiceTests.java | 14 ++-- 2 files changed, 72 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 0d51a4271e85b..d27b2cc3c774d 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -299,6 +299,7 @@ private CacheEntry(T chunk) { private final Executor ioExecutor; // executor to run bulk reading from the blobstore on + @Deprecated(forRemoval = true) private final Executor bulkIOExecutor; private final SharedBytes sharedBytes; @@ -485,7 +486,35 @@ CacheFileRegion get(KeyType cacheKey, long fileLength, int region) { * * @return {@code true} if there were enough free pages to start downloading the full entry */ + @Deprecated(forRemoval = true) public boolean maybeFetchFullEntry(KeyType cacheKey, long length, RangeMissingHandler writer, ActionListener listener) { + return maybeFetchFullEntry(cacheKey, length, writer, bulkIOExecutor, listener); + } + + /** + * Fetch and cache the full blob for the given cache entry from the remote repository if there + * are enough free pages in the cache to do so. + *
<p>
    + * This method returns as soon as the download tasks are instantiated, but the tasks themselves + * are run on the bulk executor. + *
<p>
    + * If an exception is thrown from the writer then the cache entry being downloaded is freed + * and unlinked + * + * @param cacheKey the key to fetch data for + * @param length the length of the blob to fetch + * @param writer a writer that handles writing of newly downloaded data to the shared cache + * @param fetchExecutor an executor to use for reading from the blob store + * @param listener listener that is called once all downloading has finished + * @return {@code true} if there were enough free pages to start downloading the full entry + */ + public boolean maybeFetchFullEntry( + KeyType cacheKey, + long length, + RangeMissingHandler writer, + Executor fetchExecutor, + ActionListener listener + ) { int finalRegion = getEndingRegion(length); if (freeRegionCount() < finalRegion) { // Not enough room to download a full file without evicting existing data, so abort @@ -517,7 +546,7 @@ public boolean maybeFetchFullEntry(KeyType cacheKey, long length, RangeMissingHa rangeToWrite, (channel, pos, relativePos, len) -> Math.toIntExact(len), writer, - bulkIOExecutor, + fetchExecutor, regionListener.delegateResponse((l, e) -> { if (e instanceof AlreadyClosedException) { l.onResponse(0); @@ -549,11 +578,42 @@ public boolean maybeFetchFullEntry(KeyType cacheKey, long length, RangeMissingHa * region to write is already available in cache, if the region is pending fetching via another thread or if there is * not enough free pages to fetch the region. */ + @Deprecated(forRemoval = true) + public void maybeFetchRegion( + final KeyType cacheKey, + final int region, + final long blobLength, + final RangeMissingHandler writer, + final ActionListener listener + ) { + maybeFetchRegion(cacheKey, region, blobLength, writer, bulkIOExecutor, listener); + } + + /** + * Fetch and write in cache a region of a blob if there are enough free pages in the cache to do so. + *
<p>
    + * This method returns as soon as the download tasks are instantiated, but the tasks themselves + * are run on the bulk executor. + *
<p>
    + * If an exception is thrown from the writer then the cache entry being downloaded is freed + * and unlinked + * + * @param cacheKey the key to fetch data for + * @param region the region of the blob to fetch + * @param blobLength the length of the blob from which the region is fetched (used to compute the size of the ending region) + * @param writer a writer that handles writing of newly downloaded data to the shared cache + * @param fetchExecutor an executor to use for reading from the blob store + * @param listener a listener that is completed with {@code true} if the current thread triggered the fetching of the region, in + * which case the data is available in cache. The listener is completed with {@code false} in every other cases: if + * the region to write is already available in cache, if the region is pending fetching via another thread or if + * there is not enough free pages to fetch the region. + */ public void maybeFetchRegion( final KeyType cacheKey, final int region, final long blobLength, final RangeMissingHandler writer, + final Executor fetchExecutor, final ActionListener listener ) { if (freeRegionCount() < 1 && maybeEvictLeastUsed() == false) { @@ -568,7 +628,7 @@ public void maybeFetchRegion( return; } final CacheFileRegion entry = get(cacheKey, blobLength, region); - entry.populate(regionRange, writer, bulkIOExecutor, listener); + entry.populate(regionRange, writer, fetchExecutor, listener); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java index 5cdd44ad86332..ec9d5d5c541f4 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java @@ -530,6 +530,7 @@ public void execute(Runnable command) { return generic; } }; + final var bulkExecutor = threadPool.executor("bulk"); try ( NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); @@ -551,7 +552,7 @@ public void execute(Runnable command) { cacheService.maybeFetchFullEntry(cacheKey, size, (channel, channelPos, relativePos, length, progressUpdater) -> { bytesRead.addAndGet(-length); progressUpdater.accept(length); - }, future); + }, bulkExecutor, future); future.get(10, TimeUnit.SECONDS); assertEquals(0L, bytesRead.get()); @@ -564,7 +565,7 @@ public void execute(Runnable command) { assertEquals(2, cacheService.freeRegionCount()); var configured = cacheService.maybeFetchFullEntry(cacheKey, size(500), (ch, chPos, relPos, len, update) -> { throw new AssertionError("Should never reach here"); - }, ActionListener.noop()); + }, bulkExecutor, ActionListener.noop()); assertFalse(configured); assertEquals(2, cacheService.freeRegionCount()); } @@ -591,6 +592,7 @@ public ExecutorService executor(String name) { return generic; } }; + final var bulkExecutor = threadPool.executor("bulk"); try ( NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); @@ -616,6 +618,7 @@ public ExecutorService executor(String name) { cacheKey, size, (channel, channelPos, relativePos, length, progressUpdater) -> progressUpdater.accept(length), + bulkExecutor, f ) ); @@ -944,6 +947,7 @@ public void execute(Runnable command) { return generic; } }; + final var 
bulkExecutor = threadPool.executor("bulk"); try ( NodeEnvironment environment = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); var cacheService = new SharedBlobCacheService<>( @@ -965,7 +969,7 @@ public void execute(Runnable command) { cacheService.maybeFetchRegion(cacheKey, 0, blobLength, (channel, channelPos, relativePos, length, progressUpdater) -> { bytesRead.addAndGet(length); progressUpdater.accept(length); - }, future); + }, bulkExecutor, future); var fetched = future.get(10, TimeUnit.SECONDS); assertThat("Region has been fetched", fetched, is(true)); @@ -993,6 +997,7 @@ public void execute(Runnable command) { bytesRead.addAndGet(length); progressUpdater.accept(length); }, + bulkExecutor, listener ); } @@ -1015,6 +1020,7 @@ public void execute(Runnable command) { (channel, channelPos, relativePos, length, progressUpdater) -> { throw new AssertionError("should not be executed"); }, + bulkExecutor, future ); assertThat("Listener is immediately completed", future.isDone(), is(true)); @@ -1032,7 +1038,7 @@ public void execute(Runnable command) { cacheService.maybeFetchRegion(cacheKey, 0, blobLength, (channel, channelPos, relativePos, length, progressUpdater) -> { bytesRead.addAndGet(length); progressUpdater.accept(length); - }, future); + }, bulkExecutor, future); var fetched = future.get(10, TimeUnit.SECONDS); assertThat("Region has been fetched", fetched, is(true)); From edfa74e59ea81a5a75edc98376efd557966df425 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 18 Apr 2024 16:00:57 -0500 Subject: [PATCH 115/130] Adding human readable times to geoip stats (#107647) --- docs/changelog/107647.yaml | 5 +++++ .../elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java | 5 +++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107647.yaml diff --git a/docs/changelog/107647.yaml b/docs/changelog/107647.yaml new file mode 100644 index 0000000000000..97d98a7c91079 --- /dev/null +++ b/docs/changelog/107647.yaml @@ -0,0 +1,5 @@ +pr: 107647 +summary: Adding human readable times to geoip stats +area: Ingest Node +type: enhancement +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java index e2aec479a6742..2557e8c4682ac 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -136,8 +137,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("hits", cacheStats.hits()); builder.field("misses", cacheStats.misses()); builder.field("evictions", cacheStats.evictions()); - builder.field("hits_time_in_millis", cacheStats.hitsTimeInMillis()); - builder.field("misses_time_in_millis", cacheStats.missesTimeInMillis()); + builder.humanReadableField("hits_time_in_millis", "hits_time", new TimeValue(cacheStats.hitsTimeInMillis())); + builder.humanReadableField("misses_time_in_millis", "misses_time", new 
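The net effect of this patch is that callers now thread an explicit Executor down to CacheFileRegion#populate instead of relying on the hard-wired bulkIOExecutor. A minimal usage sketch, illustrative only: the cacheService, cacheKey and blobLength variables are assumed to exist in scope, and the lambda shape mirrors the RangeMissingHandler used in the tests above.

    // Sketch: driving the fetch from a caller-chosen executor and waiting on the result.
    void prefetchFirstRegion(ThreadPool threadPool) {
        Executor fetchExecutor = threadPool.executor("bulk"); // any executor may be supplied now
        PlainActionFuture<Boolean> future = new PlainActionFuture<>();
        cacheService.maybeFetchRegion(cacheKey, 0, blobLength, (channel, channelPos, relativePos, length, progressUpdater) -> {
            // write the downloaded bytes to the shared cache file, then acknowledge progress
            progressUpdater.accept(length);
        }, fetchExecutor, future);
        boolean triggered = future.actionGet(); // true only if this call started the download
    }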
From edfa74e59ea81a5a75edc98376efd557966df425 Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Thu, 18 Apr 2024 16:00:57 -0500
Subject: [PATCH 115/130] Adding human readable times to geoip stats (#107647)

---
 docs/changelog/107647.yaml                                  | 5 +++++
 .../elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java  | 5 +++--
 2 files changed, 8 insertions(+), 2 deletions(-)
 create mode 100644 docs/changelog/107647.yaml

diff --git a/docs/changelog/107647.yaml b/docs/changelog/107647.yaml
new file mode 100644
index 0000000000000..97d98a7c91079
--- /dev/null
+++ b/docs/changelog/107647.yaml
@@ -0,0 +1,5 @@
+pr: 107647
+summary: Adding human readable times to geoip stats
+area: Ingest Node
+type: enhancement
+issues: []
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java
index e2aec479a6742..2557e8c4682ac 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsAction.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -136,8 +137,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.field("hits", cacheStats.hits());
             builder.field("misses", cacheStats.misses());
             builder.field("evictions", cacheStats.evictions());
-            builder.field("hits_time_in_millis", cacheStats.hitsTimeInMillis());
-            builder.field("misses_time_in_millis", cacheStats.missesTimeInMillis());
+            builder.humanReadableField("hits_time_in_millis", "hits_time", new TimeValue(cacheStats.hitsTimeInMillis()));
+            builder.humanReadableField("misses_time_in_millis", "misses_time", new TimeValue(cacheStats.missesTimeInMillis()));
             builder.endObject();
             builder.endObject();
         }
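For readers unfamiliar with the humanReadableField convention: when a request sets ?human=true, XContent emits a second, human-formatted key alongside the raw value. A hedged sketch of the mechanism, not taken from the patch (the field names mirror the diff above, the builder setup and exact rendering are illustrative):

    // Fragment inside a method declared `throws IOException`.
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.humanReadable(true); // normally driven by the ?human request parameter
    builder.startObject();
    builder.humanReadableField("hits_time_in_millis", "hits_time", new TimeValue(93_000));
    builder.endObject();
    // With humanReadable(true) this renders roughly:
    // {"hits_time":"1.5m","hits_time_in_millis":93000}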
From f3400081a41e8529d41a5fe8d3206e0577f00641 Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Thu, 18 Apr 2024 18:19:21 -0500
Subject: [PATCH 116/130] Avoiding use of writeVLong for negative numbers in
 enrich cache stats (#107649)

The enrich cache updates in #107579 used `StreamOutput::writeVLong` to
write a potentially negative number. `writeVLong` does not support
negative numbers.
---
 .../xpack/core/enrich/action/EnrichStatsAction.java | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java
index 41cd35557c18d..b69a23614c573 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/EnrichStatsAction.java
@@ -203,8 +203,8 @@ public CacheStats(StreamInput in) throws IOException {
                 in.readVLong(),
                 in.readVLong(),
                 in.readVLong(),
-                in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readVLong() : -1,
-                in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readVLong() : -1
+                in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readLong() : -1,
+                in.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS) ? in.readLong() : -1
             );
         }
@@ -228,8 +228,8 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeVLong(misses);
             out.writeVLong(evictions);
             if (out.getTransportVersion().onOrAfter(TransportVersions.ENRICH_CACHE_ADDITIONAL_STATS)) {
-                out.writeVLong(hitsTimeInMillis);
-                out.writeVLong(missesTimeInMillis);
+                out.writeLong(hitsTimeInMillis);
+                out.writeLong(missesTimeInMillis);
             }
         }
     }
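Some background on why this matters: the vlong wire format packs seven payload bits per byte and is only defined for non-negative values, so a negative input either trips an assertion or fails to round-trip, while the fixed-width `writeLong`/`readLong` pair is sign-safe. A small standalone sketch of the contract, assuming the real BytesStreamOutput/StreamInput classes but otherwise not from the patch:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    // Sketch: why the stats fields switched from writeVLong to writeLong.
    void roundTripNegativeLong() throws java.io.IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeLong(-1L);              // fixed 8 bytes, sign-safe
            // out.writeVLong(-1L);          // undefined for negatives: vlong packs
            //                               // 7 bits per byte and assumes value >= 0
            try (StreamInput in = out.bytes().streamInput()) {
                assert in.readLong() == -1L; // round-trips exactly
            }
        }
    }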
From db88bec5f45a8cef9505defdeff489c9ce461500 Mon Sep 17 00:00:00 2001
From: Albert Zaharovits
Date: Fri, 19 Apr 2024 12:15:41 +0300
Subject: [PATCH 117/130] Abstract realm cache clear for role mappers (#107360)

This is a refactoring PR. This creates a base class
(AbstractRoleMapperClearRealmCache) that the concrete role mappers
(currently only DnRoleMapper and NativeRoleMappingStore) extend. The
abstract base class is responsible for storing references to realms that
need to be "refreshed" when the role mapper's contents change, and for
exposing the methods that trigger those refreshes.
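To make the new contract concrete, here is a minimal, hypothetical mapper built on the base class this patch introduces. The class name, the reload hook and the dnToRoles field are invented for illustration, and the UserData accessor is an assumption; only clearRealmCacheOnChange registration and clearRealmCachesOnLocalNode() come from the patch itself.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.xpack.security.authc.support.mapper.AbstractRoleMapperClearRealmCache;

    import java.util.Map;
    import java.util.Set;

    // Hypothetical subclass, sketched to show how a concrete mapper plugs in.
    public class ExampleRoleMapper extends AbstractRoleMapperClearRealmCache {

        private volatile Map<String, Set<String>> dnToRoles = Map.of();

        @Override
        public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
            // Resolve roles from the current (possibly reloaded) mapping state.
            listener.onResponse(dnToRoles.getOrDefault(user.getDn(), Set.of()));
        }

        // Imagined reload hook: whatever backs the mappings calls this on change.
        void onMappingsChanged(Map<String, Set<String>> reloaded) {
            this.dnToRoles = reloaded;
            // Expire every realm that registered itself via clearRealmCacheOnChange(realm),
            // mirroring what DnRoleMapper's FileListener now does.
            clearRealmCachesOnLocalNode();
        }
    }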
---
 .../authc/support/UserRoleMapper.java          |  2 +-
 .../xpack/security/authc/jwt/JwtRealm.java     |  2 +-
 .../authc/kerberos/KerberosRealm.java          |  2 +-
 .../xpack/security/authc/ldap/LdapRealm.java   |  2 +-
 .../xpack/security/authc/pki/PkiRealm.java     |  2 +-
 .../security/authc/support/DnRoleMapper.java   | 25 +-----
 .../AbstractRoleMapperClearRealmCache.java     | 81 ++++++++++++++++++
 .../support/mapper/CompositeRoleMapper.java    |  4 +-
 .../support/mapper/ExcludingRoleMapper.java    |  4 +-
 .../mapper/NativeRoleMappingStore.java         | 54 +++---------
 .../KerberosRealmAuthenticateFailedTests.java  |  2 +-
 .../kerberos/KerberosRealmCacheTests.java      |  6 +-
 .../authc/kerberos/KerberosRealmTests.java     |  4 +-
 .../authc/ldap/ActiveDirectoryRealmTests.java  |  2 +-
 .../security/authc/ldap/LdapRealmTests.java    |  2 +-
 .../security/authc/pki/PkiRealmTests.java      |  2 +-
 .../authc/support/DnRoleMapperTests.java       | 28 ++++---
 .../mapper/ExcludingRoleMapperTests.java       |  4 +-
 .../mapper/NativeRoleMappingStoreTests.java    | 84 ++++++++++---------
 .../example/realm/CustomRoleMappingRealm.java  |  2 +-
 20 files changed, 175 insertions(+), 139 deletions(-)
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/AbstractRoleMapperClearRealmCache.java

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java
index 96fb7ff4e6f41..d8f5ea6096b1f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java
@@ -46,7 +46,7 @@ public interface UserRoleMapper {
      * the whole cluster depending on whether this role-mapper has node-local data or cluster-wide
      * data.
      */
-    void refreshRealmOnChange(CachingRealm realm);
+    void clearRealmCacheOnChange(CachingRealm realm);

     /**
      * A representation of a user for whom roles should be mapped.
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java
index a541eef2f07f6..30a7e438e70b0 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java
@@ -89,7 +89,7 @@ public JwtRealm(final RealmConfig realmConfig, final SSLService sslService, fina
         throws SettingsException {
         super(realmConfig);
         this.userRoleMapper = userRoleMapper;
-        this.userRoleMapper.refreshRealmOnChange(this);
+        this.userRoleMapper.clearRealmCacheOnChange(this);
         this.allowedClockSkew = realmConfig.getSetting(JwtRealmSettings.ALLOWED_CLOCK_SKEW);
         this.populateUserMetadata = realmConfig.getSetting(JwtRealmSettings.POPULATE_USER_METADATA);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
index 273fe6c487439..d1cff736ef40c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
@@ -86,7 +86,7 @@ public KerberosRealm(final RealmConfig config, final NativeRoleMappingStore nati
     ) {
         super(config);
         this.userRoleMapper = nativeRoleMappingStore;
-        this.userRoleMapper.refreshRealmOnChange(this);
+        this.userRoleMapper.clearRealmCacheOnChange(this);
         final TimeValue ttl = config.getSetting(KerberosRealmSettings.CACHE_TTL_SETTING);
         if (ttl.getNanos() > 0) {
             this.userPrincipalNameToUserCache = (userPrincipalNameToUserCache == null)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java
index 48dd0fda5b569..0c66389253e74 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java
@@ -90,7 +90,7 @@ public LdapRealm(
         this.roleMapper = roleMapper;
         this.threadPool = threadPool;
         this.executionTimeout = config.getSetting(LdapRealmSettings.EXECUTION_TIMEOUT);
-        roleMapper.refreshRealmOnChange(this);
+        roleMapper.clearRealmCacheOnChange(this);
     }

     static SessionFactory sessionFactory(RealmConfig config, SSLService sslService, ThreadPool threadPool) throws LDAPException {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java
index 93d70d831fa7b..785add149bc00 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java
@@ -92,7 +92,7 @@ public PkiRealm(RealmConfig config, ResourceWatcherService watcherService, Nativ
         this.trustManager = trustManagers(config);
         this.principalPattern = config.getSetting(PkiRealmSettings.USERNAME_PATTERN_SETTING);
         this.roleMapper = roleMapper;
-        this.roleMapper.refreshRealmOnChange(this);
+        this.roleMapper.clearRealmCacheOnChange(this);
         this.cache = CacheBuilder.builder()
             .setExpireAfterWrite(config.getSetting(PkiRealmSettings.CACHE_TTL_SETTING))
             .setMaximumWeight(config.getSetting(PkiRealmSettings.CACHE_MAX_USERS_SETTING))
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
index a2fe426e7a50c..9c3714124f4f8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
@@ -21,9 +21,8 @@
 import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.xpack.core.XPackPlugin;
 import org.elasticsearch.xpack.core.security.authc.RealmConfig;
-import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
 import org.elasticsearch.xpack.core.security.authc.support.DnRoleMapperSettings;
-import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
+import org.elasticsearch.xpack.security.authc.support.mapper.AbstractRoleMapperClearRealmCache;

 import java.io.IOException;
 import java.nio.file.Files;
@@ -34,9 +33,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import java.util.Set;
-import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.stream.Collectors;

 import static java.util.Collections.emptyMap;
@@ -48,19 +45,17 @@
 /**
  * This class loads and monitors the file defining the mappings of DNs to internal ES Roles.
  */
-public class DnRoleMapper implements UserRoleMapper {
+public class DnRoleMapper extends AbstractRoleMapperClearRealmCache {

     private static final Logger logger = LogManager.getLogger(DnRoleMapper.class);

     protected final RealmConfig config;

     private final Path file;
     private final boolean useUnmappedGroupsAsRoles;
-    private final CopyOnWriteArrayList<Runnable> listeners = new CopyOnWriteArrayList<>();
     private volatile Map<String, List<String>> dnRoles;

     public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) {
         this.config = config;
-
         useUnmappedGroupsAsRoles = config.getSetting(DnRoleMapperSettings.USE_UNMAPPED_GROUPS_AS_ROLES_SETTING);
         file = resolveFile(config);
         dnRoles = parseFileLenient(file, logger, config.type(), config.name());
@@ -73,15 +68,6 @@ public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) {
         }
     }

-    @Override
-    public void refreshRealmOnChange(CachingRealm realm) {
-        addListener(realm::expireAll);
-    }
-
-    synchronized void addListener(Runnable listener) {
-        listeners.add(Objects.requireNonNull(listener, "listener cannot be null"));
-    }
-
     public static Path resolveFile(RealmConfig realmConfig) {
         String location = realmConfig.getSetting(DnRoleMapperSettings.ROLE_MAPPING_FILE_SETTING);
         return XPackPlugin.resolveConfigFile(realmConfig.env(), location);
@@ -232,10 +218,6 @@ public Set<String> resolveRoles(String userDnString, Collection<String> groupDns
         return roles;
     }

-    public void notifyRefresh() {
-        listeners.forEach(Runnable::run);
-    }
-
     private class FileListener implements FileChangesListener {
         @Override
         public void onFileCreated(Path file) {
@@ -260,10 +242,9 @@ public void onFileChanged(Path file) {
                     config.type(),
                     config.name()
                 );
-                notifyRefresh();
+                clearRealmCachesOnLocalNode();
             }
         }
     }
-
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/AbstractRoleMapperClearRealmCache.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/AbstractRoleMapperClearRealmCache.java
new file mode 100644
index 0000000000000..8c081ff97b41c
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/AbstractRoleMapperClearRealmCache.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.security.authc.support.mapper;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
+import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
+import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
+import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import static org.elasticsearch.core.Strings.format;
+import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
+import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
+
+/**
+ * This is the base class for {@link UserRoleMapper} implementations that need to notify registered {@link CachingRealm}s,
+ * when the role mapping rules change, to invalidate their caches that could rely on the obsolete role mapping rules.
+ */
+public abstract class AbstractRoleMapperClearRealmCache implements UserRoleMapper {
+
+    private static final Logger logger = LogManager.getLogger(AbstractRoleMapperClearRealmCache.class);
+    private final List<String> realmNamesToClearCaches = new CopyOnWriteArrayList<>();
+    private final List<Runnable> localRealmCacheInvalidators = new CopyOnWriteArrayList<>();
+
+    /**
+     * Indicates that the provided realm should have its cache cleared if this store is updated.
+     * @see ClearRealmCacheAction
+     */
+    @Override
+    public void clearRealmCacheOnChange(CachingRealm realm) {
+        realmNamesToClearCaches.add(realm.name());
+        localRealmCacheInvalidators.add(realm::expireAll);
+    }
+
+    /**
+     * {@link UserRoleMapper} implementations should be calling this method after role mappings changed,
+     * in order to clear realm caches across the cluster.
+     */
+    protected void clearRealmCachesOnAllNodes(Client client, ActionListener<Void> listener) {
+        if (realmNamesToClearCaches.isEmpty()) {
+            listener.onResponse(null);
+            return;
+        }
+        final String[] realmNames = this.realmNamesToClearCaches.toArray(Strings.EMPTY_ARRAY);
+        executeAsyncWithOrigin(
+            client,
+            SECURITY_ORIGIN,
+            ClearRealmCacheAction.INSTANCE,
+            new ClearRealmCacheRequest().realms(realmNames),
+            ActionListener.wrap(response -> {
+                logger.debug(() -> format("Cleared cached in realms [%s] due to role mapping change", Arrays.toString(realmNames)));
+                listener.onResponse(null);
+            }, ex -> {
+                logger.warn(() -> "Failed to clear cache for realms [" + Arrays.toString(realmNames) + "]", ex);
+                listener.onFailure(ex);
+            })
+        );
+    }
+
+    // public for testing
+    /**
+     * {@link UserRoleMapper} implementations should be calling this method after role mappings changed,
+     * in order to clear realm caches on the local node only.
+     */
+    public void clearRealmCachesOnLocalNode() {
+        localRealmCacheInvalidators.forEach(Runnable::run);
+    }
+}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java
index 260550f9ff087..12b5ddc07786c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java
@@ -54,8 +54,8 @@ public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
     }

     @Override
-    public void refreshRealmOnChange(CachingRealm realm) {
-        this.delegates.forEach(mapper -> mapper.refreshRealmOnChange(realm));
+    public void clearRealmCacheOnChange(CachingRealm realm) {
+        this.delegates.forEach(mapper -> mapper.clearRealmCacheOnChange(realm));
     }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapper.java
index 70f5213deb676..5cedf284b8d9b 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapper.java
@@ -44,7 +44,7 @@ private Set<String> excludeRoles(Set<String> resolvedRoles) {
     }

     @Override
-    public void refreshRealmOnChange(CachingRealm realm) {
-        delegate.refreshRealmOnChange(realm);
+    public void clearRealmCacheOnChange(CachingRealm realm) {
+        delegate.clearRealmCacheOnChange(realm);
     }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
index 926626f2eaf10..cd1291f7379cb 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.CheckedBiConsumer;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
@@ -33,12 +32,8 @@
 import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.security.ScrollHelper;
-import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
-import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
-import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
-import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel;
@@ -46,14 +41,12 @@
 import org.elasticsearch.xpack.security.support.SecuritySystemIndices;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -82,7 +75,7 @@
  * is done by this class. Modification operations make a best effort attempt to clear the cache
  * on all nodes for the user that was modified.
  */
-public class NativeRoleMappingStore implements UserRoleMapper {
+public class NativeRoleMappingStore extends AbstractRoleMapperClearRealmCache {

     /**
      * This setting is never registered by the security plugin - in order to disable the native role APIs
@@ -112,7 +105,6 @@ public class NativeRoleMappingStore implements UserRoleMapper {
     private final Client client;
     private final SecurityIndexManager securityIndex;
     private final ScriptService scriptService;
-    private final List<String> realmsToRefresh = new CopyOnWriteArrayList<>();
     private final boolean lastLoadCacheEnabled;
     private final AtomicReference<List<ExpressionRoleMapping>> lastLoadRef = new AtomicReference<>(null);
     private final boolean enabled;
@@ -219,7 +211,13 @@ private void modifyMapping(
         } else {
             try {
                 logger.trace("Modifying role mapping [{}] for [{}]", name, request.getClass().getSimpleName());
-                inner.accept(request, ActionListener.wrap(r -> refreshRealms(listener, r), listener::onFailure));
+                inner.accept(
+                    request,
+                    ActionListener.wrap(
+                        r -> clearRealmCachesOnAllNodes(client, ActionListener.wrap(aVoid -> listener.onResponse(r), listener::onFailure)),
+                        listener::onFailure
+                    )
+                );
             } catch (Exception e) {
                 logger.error(() -> "failed to modify role-mapping [" + name + "]", e);
                 listener.onFailure(e);
@@ -392,7 +390,9 @@ public void onSecurityIndexStateChange(SecurityIndexManager.State previousState,
             || isIndexDeleted(previousState, currentState)
             || Objects.equals(previousState.indexUUID, currentState.indexUUID) == false
             || previousState.isIndexUpToDate != currentState.isIndexUpToDate) {
-            refreshRealms(ActionListener.noop(), null);
+            // the notification that the index state changed is received on every node
+            // this means that here we need only to invalidate the local realm caches only
+            clearRealmCachesOnLocalNode();
         }
     }
@@ -414,38 +414,6 @@ public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
         }, listener::onFailure));
     }

-    /**
-     * Indicates that the provided realm should have its cache cleared if this store is updated
-     * (that is, {@link #putRoleMapping(PutRoleMappingRequest, ActionListener)} or
-     * {@link #deleteRoleMapping(DeleteRoleMappingRequest, ActionListener)} are called).
-     * @see ClearRealmCacheAction
-     */
-    @Override
-    public void refreshRealmOnChange(CachingRealm realm) {
-        realmsToRefresh.add(realm.name());
-    }
-
-    private <Result> void refreshRealms(ActionListener<Result> listener, Result result) {
-        if (enabled == false || realmsToRefresh.isEmpty()) {
-            listener.onResponse(result);
-            return;
-        }
-        final String[] realmNames = this.realmsToRefresh.toArray(Strings.EMPTY_ARRAY);
-        executeAsyncWithOrigin(
-            client,
-            SECURITY_ORIGIN,
-            ClearRealmCacheAction.INSTANCE,
-            new ClearRealmCacheRequest().realms(realmNames),
-            ActionListener.wrap(response -> {
-                logger.debug(() -> format("Cleared cached in realms [%s] due to role mapping change", Arrays.toString(realmNames)));
-                listener.onResponse(result);
-            }, ex -> {
-                logger.warn(() -> "Failed to clear cache for realms [" + Arrays.toString(realmNames) + "]", ex);
-                listener.onFailure(ex);
-            })
-        );
-    }
-
     protected static ExpressionRoleMapping buildMapping(String id, BytesReference source) {
         try (
             XContentParser parser = XContentHelper.createParserNotCompressed(
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java
index 12a2420ac13cc..f01914a7fed06 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmAuthenticateFailedTests.java
@@ -160,7 +160,7 @@ public void testDelegatedAuthorizationFailedToResolve() throws Exception {
             eq(krbDebug),
             anyActionListener()
         );
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);
         verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore);
     }
 }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java
index 3fb849afd90d2..b1ddb631a8dd2 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmCacheTests.java
@@ -65,7 +65,7 @@ public void testAuthenticateWithCache() throws LoginException, GSSException {
             eq(krbDebug),
             anyActionListener()
         );
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);
         verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), anyActionListener());
         verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore);
     }
@@ -74,7 +74,7 @@ public void testCacheInvalidationScenarios() throws LoginException, GSSException
         final String outToken = randomAlphaOfLength(10);
         final List<String> userNames = Arrays.asList(randomPrincipalName(), randomPrincipalName());
         final KerberosRealm kerberosRealm = createKerberosRealm(userNames.toArray(new String[0]));
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);

         final String authNUsername = randomFrom(userNames);
         final byte[] decodedTicket = randomByteArrayOfLength(10);
@@ -154,7 +154,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetailsWhenCacheDi
             eq(krbDebug),
             anyActionListener()
         );
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);
         verify(mockNativeRoleMappingStore, times(2)).resolveRoles(any(UserData.class), anyActionListener());
         verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore);
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
index ac98ee6d19b18..177507ce6d792 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java
@@ -97,7 +97,7 @@ public void testAuthenticateWithValidTicketSucessAuthnWithUserDetails() throws L
             eq(krbDebug),
             anyActionListener()
         );
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);
         verify(mockNativeRoleMappingStore).resolveRoles(any(UserData.class), anyActionListener());
         verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore);
     }
@@ -255,7 +255,7 @@ public void testDelegatedAuthorization() throws Exception {
             eq(krbDebug),
             anyActionListener()
         );
-        verify(mockNativeRoleMappingStore).refreshRealmOnChange(kerberosRealm);
+        verify(mockNativeRoleMappingStore).clearRealmCacheOnChange(kerberosRealm);
         verifyNoMoreInteractions(mockKerberosTicketValidator, mockNativeRoleMappingStore);
         verify(otherRealm, times(2)).lookupUser(eq(expectedUsername), anyActionListener());
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java
index caafd1b919e88..b0821864aacc7 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java
@@ -318,7 +318,7 @@ public void testAuthenticateCachingClearsCacheOnRoleMapperRefresh() throws Excep
         verify(sessionFactory, times(1)).session(eq("CN=ironman"), any(SecureString.class), anyActionListener());

         // Refresh the role mappings
-        roleMapper.notifyRefresh();
+        roleMapper.clearRealmCachesOnLocalNode();

         for (int i = 0; i < count; i++) {
             PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java
index 4ed8832228e85..7083d1301a3e6 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java
@@ -259,7 +259,7 @@ public void testAuthenticateCachingRefresh() throws Exception {
         // verify one and only one session -> caching is working
         verify(ldapFactory, times(1)).session(anyString(), any(SecureString.class), anyActionListener());

-        roleMapper.notifyRefresh();
+        roleMapper.clearRealmCachesOnLocalNode();

         future = new PlainActionFuture<>();
         ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java
index 092ee92fc0ba5..eef5b0b105255 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java
@@ -140,7 +140,7 @@ private void assertSuccessfulAuthentication(Set<String> roles) throws Exception
         X509AuthenticationToken token = buildToken();
         UserRoleMapper roleMapper = buildRoleMapper(roles, token.dn());
         PkiRealm realm = buildRealm(roleMapper, globalSettings);
-        verify(roleMapper).refreshRealmOnChange(realm);
+        verify(roleMapper).clearRealmCacheOnChange(realm);

         final String expectedUsername = PkiRealm.getPrincipalFromSubjectDN(
             Pattern.compile(PkiRealmSettings.DEFAULT_USERNAME_PATTERN),
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
index 742417e6bf76a..6332e63ca5958 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger;
 import org.elasticsearch.xpack.core.security.authc.RealmConfig;
 import org.elasticsearch.xpack.core.security.authc.RealmSettings;
+import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
 import org.elasticsearch.xpack.core.security.authc.support.DnRoleMapperSettings;
 import org.junit.After;
 import org.junit.Before;
@@ -51,6 +52,9 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;

 public class DnRoleMapperTests extends ESTestCase {

@@ -103,7 +107,7 @@ public void testMapper_AutoReload() throws Exception {
         try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
             DnRoleMapper mapper = createMapper(file, watcherService);
-            mapper.addListener(latch::countDown);
+            mapper.clearRealmCacheOnChange(mockCachingRealm(randomAlphaOfLength(8), latch));

             Set<String> roles = mapper.resolveRoles("", Collections.singletonList("cn=shield,ou=marvel,o=superheros"));
             assertThat(roles, notNullValue());
@@ -147,7 +151,7 @@ public void testMapper_AutoReload_WithParseFailures() throws Exception {
         try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
             DnRoleMapper mapper = createMapper(file, watcherService);
-            mapper.addListener(latch::countDown);
+            mapper.clearRealmCacheOnChange(mockCachingRealm(randomAlphaOfLength(8), latch));

             Set<String> roles = mapper.resolveRoles("", Collections.singletonList("cn=shield,ou=marvel,o=superheros"));
             assertThat(roles, notNullValue());
@@ -191,16 +195,6 @@ public void testMapperAutoReloadWithoutListener() throws Exception {
         }
     }

-    public void testAddNullListener() throws Exception {
-        Path file = env.configFile().resolve("test_role_mapping.yml");
-        Files.write(file, Collections.singleton(""));
-        try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
-            DnRoleMapper mapper = createMapper(file, watcherService);
-            NullPointerException e = expectThrows(NullPointerException.class, () -> mapper.addListener(null));
-            assertEquals("listener cannot be null", e.getMessage());
-        }
-    }
-
     public void testParseFile() throws Exception {
         Path file = getDataPath("role_mapping.yml");
         Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
@@ -366,4 +360,14 @@ protected DnRoleMapper createMapper(Path file, ResourceWatcherService watcherSer
         RealmConfig config = new RealmConfig(identifier, mergedSettings, env, new ThreadContext(Settings.EMPTY));
         return new DnRoleMapper(config, watcherService);
     }
+
+    private static CachingRealm mockCachingRealm(String name, CountDownLatch latch) {
+        CachingRealm cachingRealm = mock(CachingRealm.class);
+        when(cachingRealm.name()).thenReturn(name);
+        doAnswer(inv -> {
+            latch.countDown();
+            return null;
+        }).when(cachingRealm).expireAll();
+        return cachingRealm;
+    }
 }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapperTests.java
index 0a3df418a8fc2..42aa5f843e13a 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapperTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExcludingRoleMapperTests.java
@@ -71,9 +71,9 @@ public void testNothingToExclude() throws Exception {
     public void testRefreshRealmOnChange() {
         final UserRoleMapper delegate = mock(UserRoleMapper.class);
         final CachingRealm realm = mock(CachingRealm.class);
-        new ExcludingRoleMapper(delegate, randomSet(0, 5, () -> randomAlphaOfLengthBetween(3, 6))).refreshRealmOnChange(realm);
+        new ExcludingRoleMapper(delegate, randomSet(0, 5, () -> randomAlphaOfLengthBetween(3, 6))).clearRealmCacheOnChange(realm);

-        verify(delegate, times(1)).refreshRealmOnChange(same(realm));
+        verify(delegate, times(1)).clearRealmCacheOnChange(same(realm));
         verify(delegate, times(0)).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java
index a0008ba632151..4add4fd37fff5 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java
@@ -21,7 +21,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.TestEnvironment;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.mustache.MustacheScriptEngine;
@@ -36,18 +35,15 @@
 import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
 import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheResponse;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
-import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
 import org.elasticsearch.xpack.core.security.authc.RealmConfig;
 import org.elasticsearch.xpack.core.security.authc.RealmSettings;
+import org.elasticsearch.xpack.core.security.authc.support.CachingRealm;
 import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
-import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression.FieldValue;
 import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices;
-import org.elasticsearch.xpack.core.security.user.User;
-import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm;
 import org.elasticsearch.xpack.security.support.SecurityIndexManager;
 import org.hamcrest.Matchers;
 import org.junit.Before;
@@ -425,33 +421,38 @@ private SecurityIndexManager.State indexState(boolean isUpToDate, ClusterHealthS
     }

     public void testCacheClearOnIndexHealthChange() {
-        final AtomicInteger numInvalidation = new AtomicInteger(0);
-        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, true);
+        final AtomicInteger numGlobalInvalidation = new AtomicInteger(0);
+        final AtomicInteger numLocalInvalidation = new AtomicInteger(0);
+        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numGlobalInvalidation, numLocalInvalidation, true);

         int expectedInvalidation = 0;

         // existing to no longer present
         SecurityIndexManager.State previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
         SecurityIndexManager.State currentState = dummyState(null);
         store.onSecurityIndexStateChange(previousState, currentState);
-        assertEquals(++expectedInvalidation, numInvalidation.get());
+        assertEquals(++expectedInvalidation, numLocalInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());

         // doesn't exist to exists
         previousState = dummyState(null);
         currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
         store.onSecurityIndexStateChange(previousState, currentState);
-        assertEquals(++expectedInvalidation, numInvalidation.get());
+        assertEquals(++expectedInvalidation, numLocalInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());

         // green or yellow to red
         previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
         currentState = dummyState(ClusterHealthStatus.RED);
         store.onSecurityIndexStateChange(previousState, currentState);
-        assertEquals(expectedInvalidation, numInvalidation.get());
+        assertEquals(expectedInvalidation, numLocalInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());

         // red to non red
         previousState = dummyState(ClusterHealthStatus.RED);
         currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
         store.onSecurityIndexStateChange(previousState, currentState);
-        assertEquals(++expectedInvalidation, numInvalidation.get());
+        assertEquals(++expectedInvalidation, numLocalInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());

         // green to yellow or yellow to green
         previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
@@ -459,28 +460,38 @@ public void testCacheClearOnIndexHealthChange() {
             previousState.indexHealth == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN
         );
         store.onSecurityIndexStateChange(previousState, currentState);
-        assertEquals(expectedInvalidation, numInvalidation.get());
+        assertEquals(expectedInvalidation, numLocalInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());
     }

     public void testCacheClearOnIndexOutOfDateChange() {
-        final AtomicInteger numInvalidation = new AtomicInteger(0);
-        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, true);
+        final AtomicInteger numGlobalInvalidation = new AtomicInteger(0);
+        final AtomicInteger numLocalInvalidation = new AtomicInteger(0);
+        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numGlobalInvalidation, numLocalInvalidation, true);

         store.onSecurityIndexStateChange(indexState(false, null), indexState(true, null));
-        assertEquals(1, numInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());
+        assertEquals(1, numLocalInvalidation.get());

         store.onSecurityIndexStateChange(indexState(true, null), indexState(false, null));
-        assertEquals(2, numInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());
+        assertEquals(2, numLocalInvalidation.get());
     }

     public void testCacheIsNotClearedIfNoRealmsAreAttached() {
-        final AtomicInteger numInvalidation = new AtomicInteger(0);
-        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, false);
+        final AtomicInteger numGlobalInvalidation = new AtomicInteger(0);
+        final AtomicInteger numLocalInvalidation = new AtomicInteger(0);
+        final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(
+            numGlobalInvalidation,
+            numLocalInvalidation,
+            false
+        );

         final SecurityIndexManager.State noIndexState = dummyState(null);
         final SecurityIndexManager.State greenIndexState = dummyState(ClusterHealthStatus.GREEN);
         store.onSecurityIndexStateChange(noIndexState, greenIndexState);
-        assertEquals(0, numInvalidation.get());
+        assertEquals(0, numGlobalInvalidation.get());
+        assertEquals(0, numLocalInvalidation.get());
     }

     public void testPutRoleMappingWillValidateTemplateRoleNamesBeforeSave() {
@@ -499,7 +510,11 @@ public void testPutRoleMappingWillValidateTemplateRoleNamesBeforeSave() {
         expectThrows(IllegalArgumentException.class, () -> nativeRoleMappingStore.putRoleMapping(putRoleMappingRequest, null));
     }

-    private NativeRoleMappingStore buildRoleMappingStoreForInvalidationTesting(AtomicInteger invalidationCounter, boolean attachRealm) {
+    private NativeRoleMappingStore buildRoleMappingStoreForInvalidationTesting(
+        AtomicInteger globalInvalidationCounter,
+        AtomicInteger localInvalidationCounter,
+        boolean attachRealm
+    ) {
         final Settings settings = Settings.builder().put("path.home", createTempDir()).build();

         final ThreadPool threadPool = mock(ThreadPool.class);
@@ -518,7 +533,7 @@
             @SuppressWarnings("unchecked")
             ActionListener<ClearRealmCacheResponse> listener = (ActionListener<ClearRealmCacheResponse>) invocationOnMock.getArguments()[2];
-            invalidationCounter.incrementAndGet();
+            globalInvalidationCounter.incrementAndGet();
             listener.onResponse(new ClearRealmCacheResponse(new ClusterName("cluster"), Collections.emptyList(), Collections.emptyList()));
             return null;
         }).when(client).execute(eq(ClearRealmCacheAction.INSTANCE), any(ClearRealmCacheRequest.class), anyActionListener());
@@ -531,26 +546,13 @@
         );

         if (attachRealm) {
-            final Environment env = TestEnvironment.newEnvironment(settings);
-            final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier("ldap", realmName);
-            final RealmConfig realmConfig = new RealmConfig(
-                identifier,
-                Settings.builder().put(settings).put(RealmSettings.getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0).build(),
-                env,
-                threadContext
-            );
-            final CachingUsernamePasswordRealm mockRealm = new CachingUsernamePasswordRealm(realmConfig, threadPool) {
-                @Override
-                protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult<User>> listener) {
-                    listener.onResponse(AuthenticationResult.notHandled());
-                }
-
-                @Override
-                protected void doLookupUser(String username, ActionListener<User> listener) {
-                    listener.onResponse(null);
-                }
-            };
-            store.refreshRealmOnChange(mockRealm);
+            CachingRealm mockRealm = mock(CachingRealm.class);
+            when(mockRealm.name()).thenReturn("mockRealm");
+            doAnswer(inv -> {
+                localInvalidationCounter.incrementAndGet();
+                return null;
+            }).when(mockRealm).expireAll();
+            store.clearRealmCacheOnChange(mockRealm);
         }
         return store;
     }
diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java
index 839d21bbd3e8a..c14158060a54c 100644
--- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java
+++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/realm/CustomRoleMappingRealm.java
@@ -42,7 +42,7 @@ public CustomRoleMappingRealm(RealmConfig config, UserRoleMapper roleMapper) {
         super(config);
         this.cache = CacheBuilder.builder().build();
         this.roleMapper = roleMapper;
-        this.roleMapper.refreshRealmOnChange(this);
+        this.roleMapper.clearRealmCacheOnChange(this);
     }

     @Override
From 4f10afa388925281c65a940928827f7e95799109 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Fri, 19 Apr 2024 20:34:06 +0200
Subject: [PATCH 118/130] Address zstd release test failures in CodecTests.
 (#107477)

The tests can only be run when the zstd feature flag is enabled.

Closes #107417
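The pattern used throughout the fix is the test framework's assumption mechanism: `assumeTrue` turns a failing precondition into a skipped test rather than a failure, which is exactly what release builds without the flag need. A stripped-down illustration (the feature-flag check mirrors the diff below; the test class itself is hypothetical):

    public class MyCodecTests extends ESTestCase {
        public void testZstdDefaultCodec() {
            // Skips (rather than fails) this test in release builds where the
            // zstd_stored_fields feature flag is disabled.
            assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled());
            // ... assertions that rely on Zstd814StoredFieldsFormat go here ...
        }
    }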
---
 .../elasticsearch/index/codec/CodecTests.java |  5 ++--
 .../mapper/CompletionFieldMapperTests.java    | 14 ++++++++---
 .../vectors/DenseVectorFieldMapperTests.java  | 23 ++++++++++++++-----
 3 files changed, 31 insertions(+), 11 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index 5b1b63e9ce82d..cce5e4c057a97 100644
--- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -18,7 +18,6 @@
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.common.settings.Settings;
@@ -40,17 +39,18 @@

 import static org.hamcrest.Matchers.instanceOf;

-@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417")
 @SuppressCodecs("*") // we test against default codec so never get a random one here!
 public class CodecTests extends ESTestCase {

     public void testResolveDefaultCodecs() throws Exception {
+        assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled());
         CodecService codecService = createCodecService();
         assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class));
         assertThat(codecService.codec("default"), instanceOf(Elasticsearch814Codec.class));
     }

     public void testDefault() throws Exception {
+        assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled());
         Codec codec = createCodecService().codec("default");
         assertEquals(
             "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=0), chunkSize=14336, maxDocsPerChunk=128, blockShift=10)",
@@ -59,6 +59,7 @@ public void testDefault() throws Exception {
     }

     public void testBestCompression() throws Exception {
+        assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled());
         Codec codec = createCodecService().codec("best_compression");
         assertEquals(
             "Zstd814StoredFieldsFormat(compressionMode=ZSTD(level=3), chunkSize=245760, maxDocsPerChunk=2048, blockShift=10)",
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
index 229d16ab85aef..982a7ed6afaa5 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java
@@ -37,6 +37,7 @@
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.codec.CodecService;
+import org.elasticsearch.index.codec.LegacyPerFieldMapperCodec;
 import org.elasticsearch.index.codec.PerFieldMapperCodec;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -147,9 +148,16 @@ public void testPostingsFormat() throws IOException {
         MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping));
         CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE);
         Codec codec = codecService.codec("default");
-        assertThat(codec, instanceOf(PerFieldMapperCodec.class));
-        PerFieldMapperCodec perFieldCodec = (PerFieldMapperCodec) codec;
-        assertThat(perFieldCodec.getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class));
+        if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) {
+            assertThat(codec, instanceOf(PerFieldMapperCodec.class));
+            assertThat(((PerFieldMapperCodec) codec).getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class));
+        } else {
+            assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class));
+            assertThat(
+                ((LegacyPerFieldMapperCodec) codec).getPostingsFormatForField("field"),
+                instanceOf(Completion99PostingsFormat.class)
+            );
+        }
     }

     public void testDefaultConfiguration() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
index 41707cdfdded8..4072e0e95bfe7 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
@@ -28,6 +28,7 @@
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.codec.CodecService;
+import org.elasticsearch.index.codec.LegacyPerFieldMapperCodec;
 import org.elasticsearch.index.codec.PerFieldMapperCodec;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentParsingException;
@@ -1092,7 +1093,6 @@ public void testFloatVectorQueryBoundaries() throws IOException {
         );
     }

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417")
     public void testKnnVectorsFormat() throws IOException {
         final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10);
         final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10);
@@ -1115,8 +1115,14 @@
         }));
         CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE);
         Codec codec = codecService.codec("default");
-        assertThat(codec, instanceOf(PerFieldMapperCodec.class));
-        KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        KnnVectorsFormat knnVectorsFormat;
+        if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) {
+            assertThat(codec, instanceOf(PerFieldMapperCodec.class));
+            knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        } else {
+            assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class));
+            knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        }
         String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn="
             + (setM ? m : DEFAULT_MAX_CONN)
             + ", beamWidth="
@@ -1126,7 +1132,6 @@ public void testKnnVectorsFormat() throws IOException {
         assertEquals(expectedString, knnVectorsFormat.toString());
     }

-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107417")
     public void testKnnQuantizedHNSWVectorsFormat() throws IOException {
         final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10);
         final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10);
@@ -1148,8 +1153,14 @@ public void testKnnQuantizedHNSWVectorsFormat() throws IOException {
         }));
         CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE);
         Codec codec = codecService.codec("default");
-        assertThat(codec, instanceOf(PerFieldMapperCodec.class));
-        KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        KnnVectorsFormat knnVectorsFormat;
+        if (CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) {
+            assertThat(codec, instanceOf(PerFieldMapperCodec.class));
+            knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        } else {
+            assertThat(codec, instanceOf(LegacyPerFieldMapperCodec.class));
+            knnVectorsFormat = ((LegacyPerFieldMapperCodec) codec).getKnnVectorsFormatForField("field");
+        }
         String expectedString = "ES814HnswScalarQuantizedVectorsFormat(name=ES814HnswScalarQuantizedVectorsFormat, maxConn="
             + m
             + ", beamWidth="
From 12d3d3a520fcfb7b1acf7a5aeccf3c75b600a5d6 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Fri, 19 Apr 2024 22:07:23 -0700
Subject: [PATCH 119/130] Avoid O(n^2) in mergeOutputExpressions (#107659)

Replace the list with a map of last positions to avoid O(N^2).
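The quadratic cost came from calling `lastIndexOf` (an O(n) scan) once per field inside a loop over the fields. Precomputing each name's last position in a hash map makes the membership and last-position checks O(1). A self-contained sketch of the same idea using plain collections (`Field` here is a stand-in record, not the ES type):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    record Field(String name) {}

    class LastPositionDemo {
        // O(n^2): lastIndexOf scans the list for every element.
        static List<Field> keepLastOccurrencesSlow(List<Field> fields) {
            List<String> names = fields.stream().map(Field::name).toList();
            List<Field> out = new ArrayList<>();
            for (int i = 0; i < fields.size(); i++) {
                if (names.lastIndexOf(fields.get(i).name()) == i) {
                    out.add(fields.get(i));
                }
            }
            return out;
        }

        // O(n): one pass to record each name's last index, one pass to filter.
        static List<Field> keepLastOccurrencesFast(List<Field> fields) {
            Map<String, Integer> lastPositions = new HashMap<>();
            for (int i = 0; i < fields.size(); i++) {
                lastPositions.put(fields.get(i).name(), i);
            }
            List<Field> out = new ArrayList<>();
            for (int i = 0; i < fields.size(); i++) {
                if (lastPositions.get(fields.get(i).name()) == i) {
                    out.add(fields.get(i));
                }
            }
            return out;
        }
    }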
From a3a1cdd620cb511b906c2ec26ff38a4f14a0cae1 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Sat, 20 Apr 2024 19:59:58 +0200
Subject: [PATCH 120/130] Cleanup some TransportAction dependencies (#107661)

Lots of unused stuff at this point due to various refactorings; also
fixed a couple of spots where a private variable shadows a parent class
variable.

---
 .../DataStreamsStatsTransportAction.java      |  4 ---
 .../stats/GeoIpStatsTransportAction.java      |  2 --
 .../elasticsearch/action/ActionModule.java    |  2 +-
 .../TransportUpdateDesiredNodesAction.java    |  2 --
 .../TransportPendingClusterTasksAction.java   |  3 --
 .../TransportFindDanglingIndexAction.java     |  2 --
 .../TransportListDanglingIndicesAction.java   |  2 --
 .../TransportGetFieldMappingsIndexAction.java |  2 --
 .../indices/resolve/ResolveIndexAction.java   |  4 ---
 .../TransportFieldCapabilitiesAction.java     |  2 --
 .../node/TransportBroadcastByNodeAction.java  |  6 ++--
 .../action/update/TransportUpdateAction.java  |  2 --
 .../rest/action/document/RestIndexAction.java |  8 +----
 .../TransportResolveIndexActionTests.java     |  1 -
 .../TransportBroadcastByNodeActionTests.java  |  6 ++--
 .../action/document/RestIndexActionTests.java |  2 +-
 .../action/TransportForgetFollowerAction.java |  2 --
 .../action/TransportResumeFollowAction.java   |  2 --
 .../action/XPackUsageRestCancellationIT.java  |  9 ++---
 .../AbstractTransportSetResetModeAction.java  |  2 --
 .../downsample/TransportDownsampleAction.java |  2 --
 .../TransportDownsampleIndexerAction.java     |  2 --
 .../EnterpriseSearchUsageTransportAction.java |  3 --
 .../TransportDeleteConnectorAction.java       |  8 +----
 .../action/TransportGetConnectorAction.java   |  8 +----
 .../action/TransportListConnectorAction.java  |  8 +----
 .../action/TransportPostConnectorAction.java  |  8 +----
 .../action/TransportPutConnectorAction.java   |  8 +----
 ...tUpdateConnectorActiveFilteringAction.java |  8 +----
 ...ransportUpdateConnectorApiKeyIdAction.java |  8 +----
 ...ortUpdateConnectorConfigurationAction.java |  8 +----
 .../TransportUpdateConnectorErrorAction.java  |  8 +----
 ...ansportUpdateConnectorFilteringAction.java |  8 +----
 ...ateConnectorFilteringValidationAction.java |  2 --
 ...ansportUpdateConnectorIndexNameAction.java |  8 +----
 ...ransportUpdateConnectorLastSeenAction.java |  8 +----
 ...ortUpdateConnectorLastSyncStatsAction.java |  8 +----
 .../TransportUpdateConnectorNameAction.java   |  8 +----
 .../TransportUpdateConnectorNativeAction.java |  8 +----
 ...ransportUpdateConnectorPipelineAction.java |  8 +----
 ...nsportUpdateConnectorSchedulingAction.java |  8 +----
 ...sportUpdateConnectorServiceTypeAction.java |  8 +----
 .../TransportUpdateConnectorStatusAction.java |  8 +----
 ...TransportCancelConnectorSyncJobAction.java |  8 +----
 ...ransportCheckInConnectorSyncJobAction.java |  8 +----
 ...TransportDeleteConnectorSyncJobAction.java |  8 +----
 .../TransportGetConnectorSyncJobAction.java   |  8 +----
 .../TransportListConnectorSyncJobsAction.java |  8 +----
 .../TransportPostConnectorSyncJobAction.java  |  8 +----
 ...portUpdateConnectorSyncJobErrorAction.java |  8 +----
 ...eConnectorSyncJobIngestionStatsAction.java |  2 --
 ...TransportQuerySearchApplicationAction.java |  3 --
 ...ortRenderSearchApplicationQueryAction.java |  4 ---
 ...portCancelConnectorSyncJobActionTests.java |  2 +-
 ...ortCheckInConnectorSyncJobActionTests.java |  2 +-
 ...portDeleteConnectorSyncJobActionTests.java |  2 +-
 ...ansportGetConnectorSyncJobActionTests.java |  2 +-
 ...sportListConnectorSyncJobsActionTests.java |  2 +-
 ...nsportPostConnectorSyncJobActionTests.java |  2 +-
 ...pdateConnectorSyncJobErrorActionTests.java |  2 +-
 ...ectorSyncJobIngestionStatsActionTests.java |  7 +---
 .../eql/plugin/TransportEqlSearchAction.java  |  2 --
 .../org/elasticsearch/xpack/fleet/Fleet.java  |  2 +-
 .../rest/RestFleetMultiSearchAction.java      |  4 ---
 .../TransportDeleteInferenceModelAction.java  |  4 ---
 .../AggregateMetricInfoTransportAction.java   |  9 +----
 .../AggregateMetricUsageTransportAction.java  |  6 +---
 .../ml/action/TransportCloseJobAction.java    |  2 --
 .../action/TransportGetJobsStatsAction.java   |  2 --
 .../action/TransportSetUpgradeModeAction.java |  2 --
 .../SearchableSnapshots.java                  |  6 +---
 .../cache/blob/BlobStoreCacheService.java     |  5 +--
 .../blob/BlobStoreCacheServiceTests.java      |  6 ++--
 .../cache/common/TestUtils.java               |  4 +--
 .../xpack/security/Security.java              |  8 ++---
 .../TransportProfileHasPrivilegesAction.java  |  4 ---
 .../action/role/TransportGetRolesAction.java  |  9 +----
 ...TransportUpdateSecuritySettingsAction.java |  3 --
 .../interceptor/SearchRequestInterceptor.java |  6 +---
 .../ShardSearchRequestInterceptor.java        |  6 +---
 .../rest/action/role/RestPutRoleAction.java   | 10 +-----
 .../user/RestProfileHasPrivilegesAction.java  |  6 +---
 ...nsportProfileHasPrivilegesActionTests.java | 11 ++----
 .../role/TransportGetRolesActionTests.java    | 35 +++----------------
 .../SearchRequestInterceptorTests.java        |  2 +-
 .../ShardSearchRequestInterceptorTests.java   |  9 ++---
 .../action/role/RestPutRoleActionTests.java   |  2 +-
 .../RestProfileHasPrivilegesActionTests.java  |  3 --
 .../xpack/sql/SqlInfoTransportAction.java     |  6 +---
 .../xpack/sql/SqlUsageTransportAction.java    |  4 ---
 .../sql/SqlInfoTransportActionTests.java      |  3 +-
 .../TransportScheduleNowTransformAction.java  | 10 ------
 .../action/TransportStopTransformAction.java  |  4 +--
 93 files changed, 78 insertions(+), 429 deletions(-)

diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
index c7d70fa06162d..1b18f8b799f4d 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
@@ -51,9 +51,7 @@ public class DataStreamsStatsTransportAction extends TransportBroadcastByNodeAct
     DataStreamsStatsAction.Response,
     DataStreamsStatsAction.DataStreamShardStats> {
 
-    private final ClusterService clusterService;
     private final IndicesService indicesService;
-    private final IndexNameExpressionResolver indexNameExpressionResolver;
 
     @Inject
     public DataStreamsStatsTransportAction(
@@ -72,9 +70,7 @@ public DataStreamsStatsTransportAction(
             DataStreamsStatsAction.Request::new,
             transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.clusterService = clusterService;
         this.indicesService = indicesService;
-        this.indexNameExpressionResolver = indexNameExpressionResolver;
     }
 
     @Override
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java
index 1e1778a81c4a8..6a3aa81f82e9e 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpStatsTransportAction.java
@@ -31,7 +31,6 @@ public class GeoIpStatsTransportAction extends TransportNodesAction {
 
-    private final TransportService transportService;
     private final DatabaseNodeService registry;
     private final GeoIpDownloaderTaskExecutor geoIpDownloaderTaskExecutor;
 
@@ -52,7 +51,6 @@ public GeoIpStatsTransportAction(
             NodeRequest::new,
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.transportService = transportService;
         this.registry = registry;
         this.geoIpDownloaderTaskExecutor = geoIpDownloaderTaskExecutor;
     }
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index cd01184801c64..e8588e738569f 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -922,7 +922,7 @@ public void initRestHandlers(Supplier nodesInCluster, Predicate<
 
         registerHandler.accept(new RestIndexAction());
         registerHandler.accept(new CreateHandler());
-        registerHandler.accept(new AutoIdHandler(nodesInCluster));
+        registerHandler.accept(new AutoIdHandler());
         registerHandler.accept(new RestGetAction());
         registerHandler.accept(new RestGetSourceAction());
         registerHandler.accept(new RestMultiGetAction(settings));
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java
index fed28366568e8..ee8295381dd88 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java
@@ -42,7 +42,6 @@ public class TransportUpdateDesiredNodesAction extends TransportMasterNodeAction
 
     private static final Logger logger = LogManager.getLogger(TransportUpdateDesiredNodesAction.class);
 
-    private final RerouteService rerouteService;
     private final FeatureService featureService;
     private final MasterServiceTaskQueue taskQueue;
 
@@ -69,7 +68,6 @@ public TransportUpdateDesiredNodesAction(
             UpdateDesiredNodesResponse::new,
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
-        this.rerouteService = rerouteService;
         this.featureService = featureService;
         this.taskQueue = clusterService.createTaskQueue(
             "update-desired-nodes",
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java
index efca8ab779b20..b9a16a4fa44bf 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java
@@ -34,8 +34,6 @@ public class TransportPendingClusterTasksAction extends TransportMasterNodeReadA
     public static final ActionType TYPE = new ActionType<>("cluster:monitor/task");
     private static final Logger logger = LogManager.getLogger(TransportPendingClusterTasksAction.class);
 
-    private final ClusterService clusterService;
-
     @Inject
     public TransportPendingClusterTasksAction(
         TransportService transportService,
@@ -55,7 +53,6 @@ public TransportPendingClusterTasksAction(
             PendingClusterTasksResponse::new,
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
-        this.clusterService = clusterService;
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java
index c39f757887e53..b6e8693acc66d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/find/TransportFindDanglingIndexAction.java
@@ -37,7 +37,6 @@ public class TransportFindDanglingIndexAction extends TransportNodesAction<
 
     public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/find");
 
-    private final TransportService transportService;
     private final DanglingIndicesState danglingIndicesState;
 
     @Inject
@@ -56,7 +55,6 @@ public TransportFindDanglingIndexAction(
             NodeFindDanglingIndexRequest::new,
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.transportService = transportService;
         this.danglingIndicesState = danglingIndicesState;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java
index e347874599ff4..f6b809ea1ea49 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/list/TransportListDanglingIndicesAction.java
@@ -39,7 +39,6 @@ public class TransportListDanglingIndicesAction extends TransportNodesAction<
 
     public static final ActionType TYPE = new ActionType<>("cluster:admin/indices/dangling/list");
 
-    private final TransportService transportService;
     private final DanglingIndicesState danglingIndicesState;
 
     @Inject
@@ -58,7 +57,6 @@ public TransportListDanglingIndicesAction(
             NodeListDanglingIndicesRequest::new,
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.transportService = transportService;
         this.danglingIndicesState = danglingIndicesState;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
index 28c01198f516f..679e344051873 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
@@ -52,7 +52,6 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
     private static final String ACTION_NAME = GetFieldMappingsAction.NAME + "[index]";
     public static final ActionType TYPE = new ActionType<>(ACTION_NAME);
 
-    protected final ClusterService clusterService;
     private final IndicesService indicesService;
 
     @Inject
@@ -74,7 +73,6 @@ public TransportGetFieldMappingsIndexAction(
             GetFieldMappingsIndexRequest::new,
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.clusterService = clusterService;
         this.indicesService = indicesService;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
index da0cc956cf9cc..1f23ee724e542 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java
@@ -38,7 +38,6 @@
 import org.elasticsearch.index.Index;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.tasks.Task;
-import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.transport.TransportService;
@@ -441,7 +440,6 @@ public int hashCode() {
 
     public static class TransportAction extends HandledTransportAction {
 
-        private final ThreadPool threadPool;
         private final ClusterService clusterService;
         private final RemoteClusterService remoteClusterService;
         private final IndexNameExpressionResolver indexNameExpressionResolver;
@@ -451,12 +449,10 @@ public static class TransportAction extends HandledTransportAction
 nodesInCluster;
-
-        public AutoIdHandler(Supplier<DiscoveryNodes> nodesInCluster) {
-            this.nodesInCluster = nodesInCluster;
-        }
+        public AutoIdHandler() {}
 
         @Override
         public String getName() {
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java
index 4c86faecd7aa6..d96210b6555fc 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveIndexActionTests.java
@@ -78,7 +78,6 @@ public void writeTo(StreamOutput out) throws IOException {
         ResolveIndexAction.TransportAction action = new ResolveIndexAction.TransportAction(
             transportService,
             clusterService,
-            threadPool,
             actionFilters,
             null
         );
diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
index b175123942cf7..9748fb4a0d422 100644
--- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
@@ -168,12 +168,12 @@ class TestTransportBroadcastByNodeAction extends TransportBroadcastByNodeAction<
 
         TestTransportBroadcastByNodeAction(String actionName) {
             super(
                 actionName,
-                clusterService,
-                transportService,
+                TransportBroadcastByNodeActionTests.this.clusterService,
+                TransportBroadcastByNodeActionTests.this.transportService,
                 new ActionFilters(Set.of()),
                 new MyResolver(),
                 Request::new,
-                transportService.getThreadPool().executor(TEST_THREAD_POOL_NAME)
+                TransportBroadcastByNodeActionTests.this.transportService.getThreadPool().executor(TEST_THREAD_POOL_NAME)
             );
         }
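
[Note: the TransportBroadcastByNodeActionTests hunk above is one of the shadowing fixes the commit
message mentions: the inner test action's constructor parameters hid the enclosing test class's
fields, so the unqualified names bound to the nearest declaration rather than the intended field.
A minimal sketch of the hazard and the Outer.this fix, with illustrative names not taken from the
patch:]

public class ShadowingSketch {

    // Outer field deliberately shares its name with the constructor parameter below.
    private final String clusterService = "field of the enclosing class";

    class InnerAction {
        InnerAction(String clusterService) {
            // Unqualified, the name binds to the nearest declaration: the parameter.
            System.out.println("unqualified: " + clusterService);
            // Qualifying with Outer.this makes the enclosing field explicit.
            System.out.println("qualified:   " + ShadowingSketch.this.clusterService);
        }
    }

    void demo() {
        new InnerAction("constructor parameter");
    }

    public static void main(String[] args) {
        new ShadowingSketch().demo();
    }
}
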
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java
index c2145991e51fa..c8640462edc66 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestIndexActionTests.java
@@ -46,7 +46,7 @@ public final class RestIndexActionTests extends RestActionTestCase {
     public void setUpAction() {
         controller().registerHandler(new RestIndexAction());
         controller().registerHandler(new CreateHandler());
-        controller().registerHandler(new AutoIdHandler(() -> clusterStateSupplier.get().nodes()));
+        controller().registerHandler(new AutoIdHandler());
     }
 
     public void testCreateOpTypeValidation() {
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
index b8b8fa0b32c31..312adde64a68a 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportForgetFollowerAction.java
@@ -47,7 +47,6 @@ public class TransportForgetFollowerAction extends TransportBroadcastByNodeActio
     BroadcastResponse,
     TransportBroadcastByNodeAction.EmptyResult> {
 
-    private final ClusterService clusterService;
     private final IndicesService indicesService;
 
     @Inject
@@ -67,7 +66,6 @@ public TransportForgetFollowerAction(
             ForgetFollowerAction.Request::new,
             transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.clusterService = clusterService;
         this.indicesService = Objects.requireNonNull(indicesService);
     }
 
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
index 8fc901fee4010..f57e0f0a85a43 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java
@@ -80,7 +80,6 @@ public class TransportResumeFollowAction extends AcknowledgedTransportMasterNode
     static final TimeValue DEFAULT_READ_POLL_TIMEOUT = TimeValue.timeValueMinutes(1);
 
     private final Client client;
-    private final ThreadPool threadPool;
     private final Executor remoteClientResponseExecutor;
     private final PersistentTasksService persistentTasksService;
     private final IndicesService indicesService;
@@ -110,7 +109,6 @@ public TransportResumeFollowAction(
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
         this.client = client;
-        this.threadPool = threadPool;
         this.remoteClientResponseExecutor = threadPool.executor(CCR_THREAD_POOL_NAME);
         this.persistentTasksService = persistentTasksService;
         this.indicesService = indicesService;
diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java
index 1e36dc5c603ab..d019851263f6b 100644
--- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java
+++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/core/rest/action/XPackUsageRestCancellationIT.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
 import org.elasticsearch.tasks.Task;
@@ -148,9 +147,7 @@ public BlockingXPackUsageAction(
             ClusterService clusterService,
             ThreadPool threadPool,
             ActionFilters actionFilters,
-            IndexNameExpressionResolver indexNameExpressionResolver,
-            Settings settings,
-            XPackLicenseState licenseState
+            IndexNameExpressionResolver indexNameExpressionResolver
         ) {
             super(
                 BlockingUsageActionXPackPlugin.BLOCKING_XPACK_USAGE.name(),
@@ -187,9 +184,7 @@ public NonBlockingXPackUsageAction(
             ClusterService clusterService,
             ThreadPool threadPool,
             ActionFilters actionFilters,
-            IndexNameExpressionResolver indexNameExpressionResolver,
-            Settings settings,
-            XPackLicenseState licenseState
+            IndexNameExpressionResolver indexNameExpressionResolver
         ) {
             super(
                 BlockingUsageActionXPackPlugin.NON_BLOCKING_XPACK_USAGE.name(),
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java
index f94d7c6caae36..6e570d2f6fa17 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java
@@ -33,7 +33,6 @@ public abstract class AbstractTransportSetResetModeAction extends AcknowledgedTransportMasterNodeAction {
 
     private static final Logger logger = LogManager.getLogger(AbstractTransportSetResetModeAction.class);
-    private final ClusterService clusterService;
 
     @Inject
     public AbstractTransportSetResetModeAction(
@@ -54,7 +53,6 @@ public AbstractTransportSetResetModeAction(
             indexNameExpressionResolver,
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
-        this.clusterService = clusterService;
     }
 
     protected abstract boolean isResetMode(ClusterState clusterState);
diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java
index 468b4331d34ab..e370ab5383fd5 100644
--- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java
+++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java
@@ -110,7 +110,6 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc
 
     private final Client client;
     private final IndicesService indicesService;
-    private final ClusterService clusterService;
    private final MasterServiceTaskQueue taskQueue;
     private final MetadataCreateIndexService metadataCreateIndexService;
     private final IndexScopedSettings indexScopedSettings;
@@ -170,7 +169,6 @@ public TransportDownsampleAction(
         );
         this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN);
         this.indicesService = indicesService;
-        this.clusterService = clusterService;
         this.metadataCreateIndexService = metadataCreateIndexService;
         this.indexScopedSettings = indexScopedSettings;
         this.threadContext = threadPool.getThreadContext();
diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java
index f7cfe2d859583..d316ca8a9e675 100644
--- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java
+++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java
@@ -49,7 +49,6 @@ public class TransportDownsampleIndexerAction extends TransportBroadcastAction<
     DownsampleIndexerAction.ShardDownsampleResponse> {
 
     private final Client client;
-    private final ClusterService clusterService;
     private final IndicesService indicesService;
     private final DownsampleMetrics downsampleMetrics;
 
@@ -75,7 +74,6 @@ public TransportDownsampleIndexerAction(
             transportService.getThreadPool().executor(Downsample.DOWNSAMPLE_TASK_THREAD_POOL_NAME)
         );
         this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN);
-        this.clusterService = clusterService;
         this.indicesService = indicesService;
         this.downsampleMetrics = downsampleMetrics;
     }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchUsageTransportAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchUsageTransportAction.java
index 204dfc967b3b6..38fc30760d728 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchUsageTransportAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchUsageTransportAction.java
@@ -22,8 +22,6 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.license.XPackLicenseState;
-import org.elasticsearch.logging.LogManager;
-import org.elasticsearch.logging.Logger;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -58,7 +56,6 @@ import static org.elasticsearch.xpack.core.application.EnterpriseSearchFeatureSetUsage.TOTAL_RULE_COUNT;
 
 public class EnterpriseSearchUsageTransportAction extends XPackUsageFeatureTransportAction {
-    private static final Logger logger = LogManager.getLogger(EnterpriseSearchUsageTransportAction.class);
     private final XPackLicenseState licenseState;
     private final OriginSettingClient clientWithOrigin;
     private final IndicesAdminClient indicesAdminClient;
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java
index e784bc054ffcd..9c71adbda78f0 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.tasks.Task;
@@ -24,12 +23,7 @@
 public class TransportDeleteConnectorAction extends HandledTransportAction {
 
-    private static final Logger logger = LogManager.getLogger(TransportQuerySearchApplicationAction.class);
     protected final SearchApplicationIndexService systemIndexService;
     private final Client client;
 
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java
index 06a57d2cd2bae..3d71d31427c09 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java
@@ -19,8 +19,6 @@
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.features.NodeFeature;
-import org.elasticsearch.logging.LogManager;
-import org.elasticsearch.logging.Logger;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.tasks.Task;
@@ -36,8 +34,6 @@ public class TransportRenderSearchApplicationQueryAction extends HandledTranspor
     SearchApplicationSearchRequest,
     RenderSearchApplicationQueryAction.Response> {
 
-    private static final Logger logger = LogManager.getLogger(TransportRenderSearchApplicationQueryAction.class);
-
     protected final SearchApplicationIndexService systemIndexService;
     private final SearchApplicationTemplateService templateService;
 
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java
index 81c56e3345e28..d8410cb467eca 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java
@@ -46,7 +46,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportCancelConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportCancelConnectorSyncJobAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java
index d88a246b6d5e2..e154df903ac1c 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java
@@ -46,7 +46,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportCheckInConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportCheckInConnectorSyncJobAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java
index 9130c44d30da7..fefa4e643d35f 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java
@@ -47,7 +47,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportDeleteConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportDeleteConnectorSyncJobAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java
index 7b83d008d92bc..3ce50b584f785 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java
@@ -47,7 +47,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportGetConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportGetConnectorSyncJobAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java
index 503e7e54255e3..4826604169006 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java
@@ -46,7 +46,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportListConnectorSyncJobsAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportListConnectorSyncJobsAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java
index 2463637ada2dd..bdc86ea03713b 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java
@@ -47,7 +47,7 @@ public void setup() {
             Collections.emptySet()
        );
 
-        action = new TransportPostConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportPostConnectorSyncJobAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java
index fd974d5fd21f7..2254e852a43f3 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java
@@ -46,7 +46,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportUpdateConnectorSyncJobErrorAction(transportService, clusterService, mock(ActionFilters.class), client());
+        action = new TransportUpdateConnectorSyncJobErrorAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java
index 625c2e6d96cda..3bc845660437a 100644
--- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java
+++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java
@@ -46,12 +46,7 @@ public void setup() {
             Collections.emptySet()
         );
 
-        action = new TransportUpdateConnectorSyncJobIngestionStatsAction(
-            transportService,
-            clusterService,
-            mock(ActionFilters.class),
-            client()
-        );
+        action = new TransportUpdateConnectorSyncJobIngestionStatsAction(transportService, mock(ActionFilters.class), client());
     }
 
     @Override
diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java
index 7c8204d1ecf39..9f87ba5c60779 100644
--- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java
+++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlSearchAction.java
@@ -69,7 +69,6 @@ public final class TransportEqlSearchAction extends HandledTransportAction
 asyncTaskManagementService;
 
@@ -92,7 +91,6 @@ public TransportEqlSearchAction(
             : null;
         this.clusterService = clusterService;
         this.planExecutor = planExecutor;
-        this.threadPool = threadPool;
         this.transportService = transportService;
 
         this.asyncTaskManagementService = new AsyncTaskManagementService<>(
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
index b16bea7c65b5b..9c840ba36749a 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
@@ -368,7 +368,7 @@ public List getRestHandlers(
         return List.of(
             new RestGetGlobalCheckpointsAction(),
             new RestFleetSearchAction(restController.getSearchUsageHolder(), clusterSupportsFeature),
-            new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), namedWriteableRegistry, clusterSupportsFeature),
+            new RestFleetMultiSearchAction(settings, restController.getSearchUsageHolder(), clusterSupportsFeature),
             new RestGetSecretsAction(),
             new RestPostSecretsAction(),
             new RestDeleteSecretsAction()
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
index 530b64729a5d1..480230b44edc0 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.TransportMultiSearchAction;
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.features.NodeFeature;
@@ -43,18 +42,15 @@ public class RestFleetMultiSearchAction extends BaseRestHandler {
 
     private final boolean allowExplicitIndex;
     private final SearchUsageHolder searchUsageHolder;
-    private final NamedWriteableRegistry namedWriteableRegistry;
     private final Predicate clusterSupportsFeature;
 
     public RestFleetMultiSearchAction(
         Settings settings,
         SearchUsageHolder searchUsageHolder,
-        NamedWriteableRegistry namedWriteableRegistry,
         Predicate clusterSupportsFeature
     ) {
         this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
         this.searchUsageHolder = searchUsageHolder;
-        this.namedWriteableRegistry = namedWriteableRegistry;
         this.clusterSupportsFeature = clusterSupportsFeature;
     }
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
index b55e2e6f8ebed..a3f402931ce54 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java
@@ -7,8 +7,6 @@
 
 package org.elasticsearch.xpack.inference.action;
 
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
@@ -32,8 +30,6 @@
 public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction {
 
-    private static final Logger logger = LogManager.getLogger(TransportDeleteInferenceModelAction.class);
-
     private final ModelRegistry modelRegistry;
     private final InferenceServiceRegistry serviceRegistry;
 
diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricInfoTransportAction.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricInfoTransportAction.java
index 949e68646184c..248c256941d98 100644
--- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricInfoTransportAction.java
+++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricInfoTransportAction.java
@@ -9,8 +9,6 @@
 
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.XPackField;
 import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
@@ -19,12 +17,7 @@
 public class AggregateMetricInfoTransportAction extends XPackInfoFeatureTransportAction {
 
     @Inject
-    public AggregateMetricInfoTransportAction(
-        TransportService transportService,
-        ActionFilters actionFilters,
-        Settings settings,
-        XPackLicenseState licenseState
-    ) {
+    public AggregateMetricInfoTransportAction(TransportService transportService, ActionFilters actionFilters) {
         super(XPackInfoFeatureAction.AGGREGATE_METRIC.name(), transportService, actionFilters);
     }
 
diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricUsageTransportAction.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricUsageTransportAction.java
index f2f10c50ee267..45069a58bec19 100644
--- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricUsageTransportAction.java
+++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/AggregateMetricUsageTransportAction.java
@@ -13,8 +13,6 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -32,9 +30,7 @@ public AggregateMetricUsageTransportAction(
         ClusterService clusterService,
         ThreadPool threadPool,
         ActionFilters actionFilters,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Settings settings,
-        XPackLicenseState licenseState
+        IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
             XPackUsageFeatureAction.AGGREGATE_METRIC.name(),
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java
index 6b605e0438b43..1ddb7d84208d0 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java
@@ -75,7 +75,6 @@ public class TransportCloseJobAction extends TransportTasksAction<
 
     private final ThreadPool threadPool;
     private final Client client;
-    private final ClusterService clusterService;
     private final AnomalyDetectionAuditor auditor;
     private final PersistentTasksService persistentTasksService;
     private final JobConfigProvider jobConfigProvider;
@@ -106,7 +105,6 @@ public TransportCloseJobAction(
         );
         this.threadPool = threadPool;
         this.client = client;
-        this.clusterService = clusterService;
         this.auditor = auditor;
         this.persistentTasksService = persistentTasksService;
         this.jobConfigProvider = jobConfigProvider;
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
index 4ae6512fcaff4..46aa219db9fbe 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java
@@ -60,7 +60,6 @@ public class TransportGetJobsStatsAction extends TransportTasksAction<
 
     private static final Logger logger = LogManager.getLogger(TransportGetJobsStatsAction.class);
 
-    private final ClusterService clusterService;
     private final AutodetectProcessManager processManager;
     private final JobResultsProvider jobResultsProvider;
     private final JobConfigProvider jobConfigProvider;
@@ -86,7 +85,6 @@ public TransportGetJobsStatsAction(
             in -> new QueryPage<>(in, JobStats::new),
             threadPool.executor(ThreadPool.Names.MANAGEMENT)
         );
-        this.clusterService = clusterService;
         this.processManager = processManager;
         this.jobResultsProvider = jobResultsProvider;
         this.jobConfigProvider = jobConfigProvider;
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java
index 4f4eee6e5c597..d9aeb8004cc7d 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java
@@ -66,7 +66,6 @@ public class TransportSetUpgradeModeAction extends AcknowledgedTransportMasterNo
     private final AtomicBoolean isRunning = new AtomicBoolean(false);
     private final PersistentTasksClusterService persistentTasksClusterService;
     private final PersistentTasksService persistentTasksService;
-    private final ClusterService clusterService;
     private final OriginSettingClient client;
 
     @Inject
@@ -91,7 +90,6 @@ public TransportSetUpgradeModeAction(
             EsExecutors.DIRECT_EXECUTOR_SERVICE
         );
         this.persistentTasksClusterService = persistentTasksClusterService;
-        this.clusterService = clusterService;
         this.client = new OriginSettingClient(client, ML_ORIGIN);
         this.persistentTasksService = persistentTasksService;
     }
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
index 6ce448bdd63d5..1868b53bfd7e9 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java
@@ -336,11 +336,7 @@ public Collection createComponents(PluginServices services) {
         );
         this.frozenCacheService.set(sharedBlobCacheService);
         components.add(cacheService);
-        final BlobStoreCacheService blobStoreCacheService = new BlobStoreCacheService(
-            clusterService,
-            client,
-            SNAPSHOT_BLOB_CACHE_INDEX
-        );
+        final BlobStoreCacheService blobStoreCacheService = new BlobStoreCacheService(client, SNAPSHOT_BLOB_CACHE_INDEX);
         this.blobStoreCacheService.set(blobStoreCacheService);
         clusterService.addListener(
             new BlobStoreCacheMaintenanceService(settings, clusterService, threadPool, client, SNAPSHOT_BLOB_CACHE_INDEX)
diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java
index a7ba0294d5c98..c14f1b3bed7bd 100644
--- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java
+++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
 import org.elasticsearch.cluster.block.ClusterBlockException;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
@@ -63,17 +62,15 @@ public class BlobStoreCacheService extends AbstractLifecycleComponent {
 
     static final int MAX_IN_FLIGHT_CACHE_FILLS = Integer.MAX_VALUE;
 
-    private final ClusterService clusterService;
     private final Semaphore inFlightCacheFills;
     private final AtomicBoolean closed;
     private final Client client;
     private final String index;
 
-    public BlobStoreCacheService(ClusterService clusterService, Client client, String index) {
+    public BlobStoreCacheService(Client client, String index) {
         this.client = new OriginSettingClient(client, SEARCHABLE_SNAPSHOTS_ORIGIN);
         this.inFlightCacheFills = new Semaphore(MAX_IN_FLIGHT_CACHE_FILLS);
         this.closed = new AtomicBoolean(false);
-        this.clusterService = clusterService;
         this.index = index;
     }
 
diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheServiceTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheServiceTests.java
index 8083d3c729ba3..bb400f11d8a71 100644
--- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheServiceTests.java
+++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheServiceTests.java
@@ -100,7 +100,7 @@ public void testGetWhenServiceNotStarted() {
             return null;
         }).when(mockClient).execute(eq(TransportGetAction.TYPE), any(GetRequest.class), any(ActionListener.class));
 
-        BlobStoreCacheService blobCacheService = new BlobStoreCacheService(null, mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
+        BlobStoreCacheService blobCacheService = new BlobStoreCacheService(mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
         blobCacheService.start();
 
         PlainActionFuture future = new PlainActionFuture<>();
@@ -132,7 +132,7 @@ public void testPutWhenServiceNotStarted() {
             return null;
         }).when(mockClient).execute(eq(TransportIndexAction.TYPE), any(IndexRequest.class), any(ActionListener.class));
 
-        BlobStoreCacheService blobCacheService = new BlobStoreCacheService(null, mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
+        BlobStoreCacheService blobCacheService = new BlobStoreCacheService(mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
         blobCacheService.start();
 
         PlainActionFuture future = new PlainActionFuture<>();
@@ -170,7 +170,7 @@ public void testWaitForInFlightCacheFillsToComplete() throws Exception {
             return null;
         }).when(mockClient).execute(eq(TransportIndexAction.TYPE), any(IndexRequest.class), any(ActionListener.class));
-        final BlobStoreCacheService blobCacheService = new BlobStoreCacheService(null, mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
+        final BlobStoreCacheService blobCacheService = new BlobStoreCacheService(mockClient, SNAPSHOT_BLOB_CACHE_INDEX);
         blobCacheService.start();
         assertThat(blobCacheService.getInFlightCacheFills(), equalTo(0));
 
diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java
index d1bcf842b7c83..9cdb683c36288 100644
--- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java
+++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/cache/common/TestUtils.java
@@ -267,7 +267,7 @@ private UnsupportedOperationException unsupportedException() {
     public static class NoopBlobStoreCacheService extends BlobStoreCacheService {
 
         public NoopBlobStoreCacheService() {
-            super(null, mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX);
+            super(mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX);
         }
 
         @Override
@@ -291,7 +291,7 @@ public static class SimpleBlobStoreCacheService extends BlobStoreCacheService {
         private final ConcurrentHashMap blobs = new ConcurrentHashMap<>();
 
         public SimpleBlobStoreCacheService() {
-            super(null, mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX);
+            super(mock(Client.class), SNAPSHOT_BLOB_CACHE_INDEX);
         }
 
         @Override
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 50c6821a68cab..de35eb6d31d1c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -1019,8 +1019,8 @@ Collection createComponents(
         if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
             requestInterceptors.addAll(
                 Arrays.asList(
-                    new SearchRequestInterceptor(threadPool, getLicenseState(), clusterService),
-                    new ShardSearchRequestInterceptor(threadPool, getLicenseState(), clusterService),
+                    new SearchRequestInterceptor(threadPool, getLicenseState()),
+                    new ShardSearchRequestInterceptor(threadPool, getLicenseState()),
                     new UpdateRequestInterceptor(threadPool, getLicenseState()),
                     new BulkShardRequestInterceptor(threadPool, getLicenseState()),
                     new DlsFlsLicenseRequestInterceptor(threadPool.getThreadContext(), getLicenseState()),
@@ -1484,7 +1484,7 @@ public List getRestHandlers(
             new RestPutUserAction(settings, getLicenseState()),
             new RestDeleteUserAction(settings, getLicenseState()),
             new RestGetRolesAction(settings, getLicenseState()),
-            new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get(), fileRolesStore.get()),
+            new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()),
             new RestDeleteRoleAction(settings, getLicenseState()),
             new RestChangePasswordAction(settings, securityContext.get(), getLicenseState()),
             new RestSetEnabledAction(settings, getLicenseState()),
@@ -1525,7 +1525,7 @@ public List getRestHandlers(
             new RestGetServiceAccountAction(settings, getLicenseState()),
             new RestKibanaEnrollAction(settings, getLicenseState()),
             new RestNodeEnrollmentAction(settings, getLicenseState()),
-            new RestProfileHasPrivilegesAction(settings, securityContext.get(), getLicenseState()),
+            new RestProfileHasPrivilegesAction(settings, getLicenseState()),
             new RestGetProfilesAction(settings, getLicenseState()),
             new RestActivateProfileAction(settings, getLicenseState()),
             new RestUpdateProfileDataAction(settings, getLicenseState()),
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesAction.java
index aa3f36a9e6073..1506fba51089e 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesAction.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction;
 import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesRequest;
 import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesResponse;
@@ -52,7 +51,6 @@ public class TransportProfileHasPrivilegesAction extends HandledTransportAction<
     private final AuthorizationService authorizationService;
     private final NativePrivilegeStore privilegeStore;
     private final ProfileService profileService;
-    private final SecurityContext securityContext;
     private final ThreadPool threadPool;
 
     @Inject
@@ -62,7 +60,6 @@ public TransportProfileHasPrivilegesAction(
         AuthorizationService authorizationService,
         NativePrivilegeStore privilegeStore,
         ProfileService profileService,
-        SecurityContext securityContext,
         ThreadPool threadPool
     ) {
         super(
@@ -75,7 +72,6 @@ public TransportProfileHasPrivilegesAction(
         this.authorizationService = authorizationService;
         this.privilegeStore = privilegeStore;
         this.profileService = profileService;
-        this.securityContext = securityContext;
         this.threadPool = threadPool;
     }
 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
index eadae3bfc0baf..229ca9f74024d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java
@@ -30,18 +30,11 @@ public class TransportGetRolesAction extends TransportAction {
 
     private final NativeRolesStore nativeRolesStore;
-    private final ReservedRolesStore reservedRolesStore;
 
     @Inject
-    public TransportGetRolesAction(
-        ActionFilters actionFilters,
-        NativeRolesStore nativeRolesStore,
-        TransportService transportService,
-        ReservedRolesStore reservedRolesStore
-    ) {
+    public TransportGetRolesAction(ActionFilters actionFilters, NativeRolesStore nativeRolesStore, TransportService transportService) {
         super(GetRolesAction.NAME, actionFilters, transportService.getTaskManager());
         this.nativeRolesStore = nativeRolesStore;
-        this.reservedRolesStore = reservedRolesStore;
     }
 
     @Override
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java index 279697ac1eb4f..216444cf51845 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.security.action.settings; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsClusterStateUpdateRequest; import org.elasticsearch.action.support.ActionFilters; @@ -46,7 +44,6 @@ public class TransportUpdateSecuritySettingsAction extends TransportMasterNodeAction< UpdateSecuritySettingsAction.Request, AcknowledgedResponse> { - private static final Logger logger = LogManager.getLogger(TransportUpdateSecuritySettingsAction.class); private final MetadataUpdateSettingsService updateSettingsService; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java index f9468b53c9d0c..571e8a00d5c49 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; @@ -20,11 +19,8 @@ public class SearchRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { - private final ClusterService clusterService; - - public SearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState, ClusterService clusterService) { + public SearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { super(threadPool.getThreadContext(), licenseState); - this.clusterService = clusterService; } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptor.java index 9f8dd0de50d83..e0ff01524c3d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptor.java @@ -11,7 +11,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.threadpool.ThreadPool; @@ -25,11 +24,8 @@ public class ShardSearchRequestInterceptor extends 
FieldAndDocumentLevelSecurityRequestInterceptor { private static final Logger logger = LogManager.getLogger(ShardSearchRequestInterceptor.class); - private final ClusterService clusterService; - - public ShardSearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState, ClusterService clusterService) { + public ShardSearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { super(threadPool.getThreadContext(), licenseState); - this.clusterService = clusterService; } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java index 75b2435fa7505..82596738e95a7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequestBuilder; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequestBuilderFactory; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; -import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import java.io.IOException; import java.util.List; @@ -35,17 +34,10 @@ public class RestPutRoleAction extends NativeRoleBaseRestHandler { private final PutRoleRequestBuilderFactory builderFactory; - private final FileRolesStore fileRolesStore; - public RestPutRoleAction( - Settings settings, - XPackLicenseState licenseState, - PutRoleRequestBuilderFactory builderFactory, - FileRolesStore fileRolesStore - ) { + public RestPutRoleAction(Settings settings, XPackLicenseState licenseState, PutRoleRequestBuilderFactory builderFactory) { super(settings, licenseState); this.builderFactory = builderFactory; - this.fileRolesStore = fileRolesStore; } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java index 24ed018a783ec..77c35438d0d0b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesRequest; import org.elasticsearch.xpack.security.Security; @@ -33,11 +32,8 @@ @ServerlessScope(Scope.INTERNAL) public class RestProfileHasPrivilegesAction extends SecurityBaseRestHandler { - private final SecurityContext securityContext; - - public RestProfileHasPrivilegesAction(Settings settings, SecurityContext securityContext, XPackLicenseState licenseState) { + public RestProfileHasPrivilegesAction(Settings settings, XPackLicenseState licenseState) { super(settings, licenseState); - this.securityContext = securityContext; } @Override diff --git
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesActionTests.java index 4155e6d4168cb..ad1a05042cf5a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/profile/TransportProfileHasPrivilegesActionTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.common.ResultsAndErrors; -import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -69,31 +68,25 @@ public class TransportProfileHasPrivilegesActionTests extends ESTestCase { private ThreadPool threadPool; - private TransportService transportService; - private ActionFilters actionFilters; private AuthorizationService authorizationService; private NativePrivilegeStore nativePrivilegeStore; private ProfileService profileService; - private SecurityContext securityContext; private TransportProfileHasPrivilegesAction transportProfileHasPrivilegesAction; @Before public void setup() { threadPool = new TestThreadPool(TransportProfileHasPrivilegesActionTests.class.getSimpleName()); - transportService = mock(TransportService.class); + TransportService transportService = mock(TransportService.class); when(transportService.getThreadPool()).thenReturn(threadPool); - actionFilters = mock(ActionFilters.class); authorizationService = mock(AuthorizationService.class); nativePrivilegeStore = mock(NativePrivilegeStore.class); profileService = mock(ProfileService.class); - securityContext = mock(SecurityContext.class); transportProfileHasPrivilegesAction = new TransportProfileHasPrivilegesAction( transportService, - actionFilters, + mock(ActionFilters.class), authorizationService, nativePrivilegeStore, profileService, - securityContext, threadPool ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 0348ff6df90b2..f1b1f194e5fbf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -67,12 +67,7 @@ public void testReservedRoles() { null, Collections.emptySet() ); - TransportGetRolesAction action = new TransportGetRolesAction( - mock(ActionFilters.class), - rolesStore, - transportService, - new ReservedRolesStore() - ); + TransportGetRolesAction action = new TransportGetRolesAction(mock(ActionFilters.class), rolesStore, transportService); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); final List names = randomSubsetOf(size, ReservedRolesStore.names()); @@ -144,12 +139,7 @@ private void testStoreRoles(List storeRoleDescriptors) { null, Collections.emptySet() ); - TransportGetRolesAction action = new TransportGetRolesAction( 
- mock(ActionFilters.class), - rolesStore, - transportService, - new ReservedRolesStore() - ); + TransportGetRolesAction action = new TransportGetRolesAction(mock(ActionFilters.class), rolesStore, transportService); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); @@ -210,12 +200,7 @@ public void testGetAllOrMix() { null, Collections.emptySet() ); - TransportGetRolesAction action = new TransportGetRolesAction( - mock(ActionFilters.class), - rolesStore, - transportService, - new ReservedRolesStore() - ); + TransportGetRolesAction action = new TransportGetRolesAction(mock(ActionFilters.class), rolesStore, transportService); final List expectedNames = new ArrayList<>(); if (all) { @@ -301,12 +286,7 @@ public void testGetWithNativeOnly() { null, Collections.emptySet() ); - final TransportGetRolesAction action = new TransportGetRolesAction( - mock(ActionFilters.class), - rolesStore, - transportService, - new ReservedRolesStore() - ); + final TransportGetRolesAction action = new TransportGetRolesAction(mock(ActionFilters.class), rolesStore, transportService); final GetRolesRequest request = new GetRolesRequest(); request.names(requestedNames.toArray(Strings.EMPTY_ARRAY)); @@ -378,12 +358,7 @@ public void testException() { null, Collections.emptySet() ); - TransportGetRolesAction action = new TransportGetRolesAction( - mock(ActionFilters.class), - rolesStore, - transportService, - new ReservedRolesStore() - ); + TransportGetRolesAction action = new TransportGetRolesAction(mock(ActionFilters.class), rolesStore, transportService); GetRolesRequest request = new GetRolesRequest(); request.names(storeRoleDescriptors.stream().map(RoleDescriptor::getName).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptorTests.java index 6bb9d6087cdaf..94de09707300f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptorTests.java @@ -44,7 +44,7 @@ public void init() { licenseState = mock(MockLicenseState.class); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); clusterService = mock(ClusterService.class); - interceptor = new SearchRequestInterceptor(threadPool, licenseState, clusterService); + interceptor = new SearchRequestInterceptor(threadPool, licenseState); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptorTests.java index 8180142a255c5..a1d5e978d7355 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ShardSearchRequestInterceptorTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.MockLicenseState; @@ -36,18 +35,15 @@ public class ShardSearchRequestInterceptorTests extends ESTestCase { - private ClusterService clusterService; private ThreadPool threadPool; - private MockLicenseState licenseState; private ShardSearchRequestInterceptor interceptor; @Before public void init() { threadPool = new TestThreadPool("shard search request interceptor tests"); - licenseState = mock(MockLicenseState.class); + MockLicenseState licenseState = mock(MockLicenseState.class); when(licenseState.isAllowed(DOCUMENT_LEVEL_SECURITY_FEATURE)).thenReturn(true); - clusterService = mock(ClusterService.class); - interceptor = new ShardSearchRequestInterceptor(threadPool, licenseState, clusterService); + interceptor = new ShardSearchRequestInterceptor(threadPool, licenseState); } @After @@ -57,7 +53,6 @@ public void stopThreadPool() { private void configureMinMondeVersion(Version version) { final ClusterState clusterState = mock(ClusterState.class); - when(clusterService.state()).thenReturn(clusterState); final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); when(clusterState.nodes()).thenReturn(discoveryNodes); when(discoveryNodes.getMinNodeVersion()).thenReturn(version); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleActionTests.java index ad20576e7f8af..e79225e5610c7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/role/RestPutRoleActionTests.java @@ -34,7 +34,7 @@ public void testFailureWhenNativeRolesDisabled() throws Exception { final Settings securityDisabledSettings = Settings.builder().put(NativeRolesStore.NATIVE_ROLES_ENABLED, false).build(); final XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.getOperationMode()).thenReturn(License.OperationMode.BASIC); - final RestPutRoleAction action = new RestPutRoleAction(securityDisabledSettings, licenseState, mock(), mock()); + final RestPutRoleAction action = new RestPutRoleAction(securityDisabledSettings, licenseState, mock()); final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) // .withParams(Map.of("name", "dice")) .withContent(new BytesArray("{ }"), XContentType.JSON) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesActionTests.java index 47a6da05010c9..10fab8b2128b8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestProfileHasPrivilegesActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.SecurityContext; import 
org.elasticsearch.xpack.security.Security; import org.junit.Before; @@ -22,7 +21,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -36,7 +34,6 @@ public void init() { licenseState = MockLicenseState.createMock(); restProfileHasPrivilegesAction = new RestProfileHasPrivilegesAction( Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(), - mock(SecurityContext.class), licenseState ); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlInfoTransportAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlInfoTransportAction.java index d7908b93b3323..f647c0016cb75 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlInfoTransportAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlInfoTransportAction.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; @@ -16,12 +15,9 @@ public class SqlInfoTransportAction extends XPackInfoFeatureTransportAction { - private final XPackLicenseState licenseState; - @Inject - public SqlInfoTransportAction(TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + public SqlInfoTransportAction(TransportService transportService, ActionFilters actionFilters) { super(XPackInfoFeatureAction.SQL.name(), transportService, actionFilters); - this.licenseState = licenseState; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java index 9e00e7ed4d485..5fccc4a3b1214 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/SqlUsageTransportAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -32,7 +31,6 @@ import java.util.stream.Collectors; public class SqlUsageTransportAction extends XPackUsageFeatureTransportAction { - private final XPackLicenseState licenseState; private final Client client; @Inject @@ -42,11 +40,9 @@ public SqlUsageTransportAction( ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState, Client client ) { super(XPackUsageFeatureAction.SQL.name(), transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver); - this.licenseState = licenseState; this.client = client; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java index 01bad2c3dee60..c663a48895a3a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/SqlInfoTransportActionTests.java @@ -59,7 +59,7 @@ public void init() throws Exception { public void testAvailable() { TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(); - SqlInfoTransportAction featureSet = new SqlInfoTransportAction(transportService, mock(ActionFilters.class), licenseState); + SqlInfoTransportAction featureSet = new SqlInfoTransportAction(transportService, mock(ActionFilters.class)); assertThat(featureSet.available(), is(true)); } @@ -101,7 +101,6 @@ public void testUsageStats() throws Exception { threadPool, mock(ActionFilters.class), null, - licenseState, client ); PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java index 48bea5c5b13b9..8553297ab9cde 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java @@ -19,17 +19,13 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Request; import org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Response; @@ -50,13 +46,10 @@ public class TransportScheduleNowTransformAction extends TransportTasksAction Date: Sat, 20 Apr 2024 20:01:04 +0200 Subject: [PATCH 121/130] Remove some more ActionType implementations (#107664) Cleaning up a couple more of these. 
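To make the recurring change easier to review: each hunk in this patch deletes a standalone singleton ActionType subclass and replaces it with a static TYPE constant declared on the corresponding transport action, so call sites move from FooAction.INSTANCE and FooAction.NAME to TransportFooAction.TYPE and TransportFooAction.TYPE.name(). A minimal self-contained sketch of the before/after shape, using simplified stand-ins rather than the real Elasticsearch classes (the actual ActionType is generic over the response type, e.g. ClusterRerouteResponse, which the extraction of this diff sometimes drops):

    // Simplified stand-in for org.elasticsearch.action.ActionType
    class ActionType<Response> {
        private final String name;

        ActionType(String name) {
            this.name = name;
        }

        String name() {
            return name;
        }
    }

    // Stub response type, for illustration only
    class ClusterRerouteResponse {}

    // Before: a dedicated singleton subclass exists only to carry the action name
    final class ClusterRerouteAction extends ActionType<ClusterRerouteResponse> {
        static final ClusterRerouteAction INSTANCE = new ClusterRerouteAction();
        static final String NAME = "cluster:admin/reroute";

        private ClusterRerouteAction() {
            super(NAME);
        }
    }

    // After: the transport action exposes the ActionType directly, and the
    // extra singleton class above can be deleted
    final class TransportClusterRerouteAction {
        static final ActionType<ClusterRerouteResponse> TYPE = new ActionType<>("cluster:admin/reroute");
    }

Registration then reads actions.register(TransportClusterRerouteAction.TYPE, TransportClusterRerouteAction.class) instead of actions.register(ClusterRerouteAction.INSTANCE, TransportClusterRerouteAction.class), as in the ActionModule hunk in this patch.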
--- .../http/ClusterStatsRestCancellationIT.java | 8 +- .../RestGetSnapshotsCancellationIT.java | 8 +- .../RestSnapshotsStatusCancellationIT.java | 8 +- .../coordination/InitialClusterStateIT.java | 4 +- .../repositories/IndexSnapshotsServiceIT.java | 4 +- .../elasticsearch/action/ActionModule.java | 24 ++--- .../cluster/reroute/ClusterRerouteAction.java | 21 ----- .../reroute/ClusterRerouteRequestBuilder.java | 2 +- .../TransportClusterRerouteAction.java | 4 +- .../shards/ClusterSearchShardsAction.java | 21 ----- .../ClusterSearchShardsRequestBuilder.java | 2 +- .../TransportClusterSearchShardsAction.java | 5 +- .../create/CreateSnapshotAction.java | 24 ----- .../create/CreateSnapshotRequestBuilder.java | 2 +- .../create/TransportCreateSnapshotAction.java | 4 +- .../snapshots/get/GetSnapshotsAction.java | 25 ------ .../get/GetSnapshotsRequestBuilder.java | 2 +- .../get/TransportGetSnapshotsAction.java | 6 +- .../get/shard/GetShardSnapshotAction.java | 21 ----- .../TransportGetShardSnapshotAction.java | 5 +- .../restore/RestoreSnapshotAction.java | 24 ----- .../RestoreSnapshotRequestBuilder.java | 2 +- .../TransportRestoreSnapshotAction.java | 4 +- .../status/SnapshotsStatusAction.java | 24 ----- .../status/SnapshotsStatusRequestBuilder.java | 4 +- .../status/TransportNodesSnapshotsStatus.java | 2 +- .../TransportSnapshotsStatusAction.java | 4 +- .../cluster/stats/ClusterStatsAction.java | 21 ----- .../stats/ClusterStatsRequestBuilder.java | 2 +- .../stats/TransportClusterStatsAction.java | 4 +- .../action/search/TransportSearchAction.java | 4 +- .../internal/support/AbstractClient.java | 34 ++++---- .../recovery/plan/ShardSnapshotsService.java | 4 +- .../AbstractClientHeadersTestCase.java | 26 +++--- .../snapshots/SnapshotResiliencyTests.java | 9 +- .../transport/TransportLoggerTests.java | 4 +- .../privilege/ClusterPrivilegeResolver.java | 20 ++--- .../authz/privilege/IndexPrivilege.java | 6 +- .../core/ilm/CreateSnapshotStepTests.java | 4 +- .../authz/privilege/PrivilegeTests.java | 12 +-- .../authz/store/ReservedRolesStoreTests.java | 87 ++++++++++--------- .../ilm/LifecycleOperationSnapshotTests.java | 8 +- ...ableSnapshotsUuidValidationIntegTests.java | 4 +- .../security/CrossClusterShardTests.java | 4 +- .../HasPrivilegesRequestBuilderTests.java | 9 +- .../xpack/security/authz/RBACEngineTests.java | 4 +- .../authz/store/CompositeRolesStoreTests.java | 6 +- .../slm/SLMSnapshotBlockingIntegTests.java | 4 +- .../xpack/slm/SnapshotLifecycleTaskTests.java | 7 +- 49 files changed, 196 insertions(+), 351 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java diff --git 
a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java index c864c90128b8f..53f4ae5c1bc37 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/ClusterStatsRestCancellationIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.http; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; @@ -104,7 +104,7 @@ public void testClusterStateRestCancellation() throws Exception { logger.info("--> sending cluster state request"); final Cancellable cancellable = getRestClient().performRequestAsync(clusterStatsRequest, wrapAsRestResponseListener(future)); - awaitTaskWithPrefix(ClusterStatsAction.NAME); + awaitTaskWithPrefix(TransportClusterStatsAction.TYPE.name()); logger.info("--> waiting for at least one task to hit a block"); assertBusy(() -> assertTrue(statsBlocks.stream().anyMatch(Semaphore::hasQueuedThreads))); @@ -113,12 +113,12 @@ public void testClusterStateRestCancellation() throws Exception { cancellable.cancel(); expectThrows(CancellationException.class, future::actionGet); - assertAllCancellableTasksAreCancelled(ClusterStatsAction.NAME); + assertAllCancellableTasksAreCancelled(TransportClusterStatsAction.TYPE.name()); } finally { Releasables.close(releasables); } - assertAllTasksHaveFinished(ClusterStatsAction.NAME); + assertAllTasksHaveFinished(TransportClusterStatsAction.TYPE.name()); } public static class StatsBlockingPlugin extends Plugin implements EnginePlugin { diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsCancellationIT.java index 317ac8bbf7635..fbaae0961dfcd 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsCancellationIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.http.snapshots; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; @@ -48,13 +48,13 @@ public void testGetSnapshotsCancellation() throws Exception { final Cancellable cancellable = getRestClient().performRequestAsync(request, wrapAsRestResponseListener(future)); assertThat(future.isDone(), equalTo(false)); - awaitTaskWithPrefix(GetSnapshotsAction.NAME); + awaitTaskWithPrefix(TransportGetSnapshotsAction.TYPE.name()); assertBusy(() -> assertTrue(repository.blocked()), 30L, TimeUnit.SECONDS); cancellable.cancel(); - assertAllCancellableTasksAreCancelled(GetSnapshotsAction.NAME); + assertAllCancellableTasksAreCancelled(TransportGetSnapshotsAction.TYPE.name()); repository.unblock(); expectThrows(CancellationException.class, 
future::actionGet); - assertAllTasksHaveFinished(GetSnapshotsAction.NAME); + assertAllTasksHaveFinished(TransportGetSnapshotsAction.TYPE.name()); } } diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestSnapshotsStatusCancellationIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestSnapshotsStatusCancellationIT.java index 840fcec3c4a20..3c842d67a78f6 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestSnapshotsStatusCancellationIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestSnapshotsStatusCancellationIT.java @@ -9,7 +9,7 @@ package org.elasticsearch.http.snapshots; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; +import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; @@ -54,13 +54,13 @@ public void testSnapshotStatusCancellation() throws Exception { final Cancellable cancellable = getRestClient().performRequestAsync(request, wrapAsRestResponseListener(future)); assertFalse(future.isDone()); - awaitTaskWithPrefix(SnapshotsStatusAction.NAME); + awaitTaskWithPrefix(TransportSnapshotsStatusAction.TYPE.name()); assertBusy(() -> assertTrue(repository.blocked()), 30L, TimeUnit.SECONDS); cancellable.cancel(); - assertAllCancellableTasksAreCancelled(SnapshotsStatusAction.NAME); + assertAllCancellableTasksAreCancelled(TransportSnapshotsStatusAction.TYPE.name()); repository.unblock(); expectThrows(CancellationException.class, future::actionGet); - assertAllTasksHaveFinished(SnapshotsStatusAction.NAME); + assertAllTasksHaveFinished(TransportSnapshotsStatusAction.TYPE.name()); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InitialClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InitialClusterStateIT.java index 83d450fe8e667..3cd7ce60d9035 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InitialClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InitialClusterStateIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.cluster.coordination; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -41,7 +41,7 @@ private static void assertClusterUuid(boolean expectCommitted, String expectedVa assertEquals(expectedValue, metadata.clusterUUID()); final ClusterStatsResponse response = PlainActionFuture.get( - fut -> client(nodeName).execute(ClusterStatsAction.INSTANCE, new ClusterStatsRequest(), fut), + fut -> client(nodeName).execute(TransportClusterStatsAction.TYPE, new ClusterStatsRequest(), fut), 10, TimeUnit.SECONDS ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java index 4b9e4e0fa0932..ea2c221c8c4a4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.get.shard.TransportGetShardSnapshotAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -346,7 +346,7 @@ private PlainActionFuture getLatestSnapshotForShardFut request = GetShardSnapshotRequest.latestSnapshotInRepositories(shardId, repositories); } - client().execute(GetShardSnapshotAction.INSTANCE, request, future); + client().execute(TransportGetShardSnapshotAction.TYPE, request, future); return future; } } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index e8588e738569f..ef73d0470b43e 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -49,34 +49,26 @@ import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.TransportVerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.TransportClusterGetSettingsAction; import org.elasticsearch.action.admin.cluster.settings.TransportClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.snapshots.clone.TransportCloneSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateAction; import org.elasticsearch.action.admin.cluster.snapshots.features.SnapshottableFeaturesAction; import org.elasticsearch.action.admin.cluster.snapshots.features.TransportResetFeatureStateAction; import org.elasticsearch.action.admin.cluster.snapshots.features.TransportSnapshottableFeaturesAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import 
org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.get.shard.TransportGetShardSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetScriptContextAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetScriptLanguageAction; @@ -641,13 +633,13 @@ public void reg actions.register(TransportGetAllocationStatsAction.TYPE, TransportGetAllocationStatsAction.class); actions.register(TransportGetDesiredBalanceAction.TYPE, TransportGetDesiredBalanceAction.class); actions.register(TransportDeleteDesiredBalanceAction.TYPE, TransportDeleteDesiredBalanceAction.class); - actions.register(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class); + actions.register(TransportClusterStatsAction.TYPE, TransportClusterStatsAction.class); actions.register(ClusterStateAction.INSTANCE, TransportClusterStateAction.class); actions.register(TransportClusterHealthAction.TYPE, TransportClusterHealthAction.class); actions.register(ClusterUpdateSettingsAction.INSTANCE, TransportClusterUpdateSettingsAction.class); actions.register(ClusterGetSettingsAction.INSTANCE, TransportClusterGetSettingsAction.class); - actions.register(ClusterRerouteAction.INSTANCE, TransportClusterRerouteAction.class); - actions.register(ClusterSearchShardsAction.INSTANCE, TransportClusterSearchShardsAction.class); + actions.register(TransportClusterRerouteAction.TYPE, TransportClusterRerouteAction.class); + actions.register(TransportClusterSearchShardsAction.TYPE, TransportClusterSearchShardsAction.class); actions.register(ClusterFormationInfoAction.INSTANCE, ClusterFormationInfoAction.TransportAction.class); actions.register(TransportPendingClusterTasksAction.TYPE, TransportPendingClusterTasksAction.class); actions.register(TransportPutRepositoryAction.TYPE, TransportPutRepositoryAction.class); @@ -655,17 +647,17 @@ public void reg actions.register(TransportDeleteRepositoryAction.TYPE, TransportDeleteRepositoryAction.class); actions.register(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class); actions.register(TransportCleanupRepositoryAction.TYPE, TransportCleanupRepositoryAction.class); - actions.register(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class); + actions.register(TransportGetSnapshotsAction.TYPE, TransportGetSnapshotsAction.class); actions.register(TransportDeleteSnapshotAction.TYPE, TransportDeleteSnapshotAction.class); - actions.register(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class); + actions.register(TransportCreateSnapshotAction.TYPE, TransportCreateSnapshotAction.class); 
actions.register(TransportCloneSnapshotAction.TYPE, TransportCloneSnapshotAction.class); - actions.register(RestoreSnapshotAction.INSTANCE, TransportRestoreSnapshotAction.class); - actions.register(SnapshotsStatusAction.INSTANCE, TransportSnapshotsStatusAction.class); + actions.register(TransportRestoreSnapshotAction.TYPE, TransportRestoreSnapshotAction.class); + actions.register(TransportSnapshotsStatusAction.TYPE, TransportSnapshotsStatusAction.class); actions.register(SnapshottableFeaturesAction.INSTANCE, TransportSnapshottableFeaturesAction.class); actions.register(ResetFeatureStateAction.INSTANCE, TransportResetFeatureStateAction.class); actions.register(GetFeatureUpgradeStatusAction.INSTANCE, TransportGetFeatureUpgradeStatusAction.class); actions.register(PostFeatureUpgradeAction.INSTANCE, TransportPostFeatureUpgradeAction.class); - actions.register(GetShardSnapshotAction.INSTANCE, TransportGetShardSnapshotAction.class); + actions.register(TransportGetShardSnapshotAction.TYPE, TransportGetShardSnapshotAction.class); actions.register(IndicesStatsAction.INSTANCE, TransportIndicesStatsAction.class); actions.register(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java deleted file mode 100644 index a09ef9e38bd19..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.reroute; - -import org.elasticsearch.action.ActionType; - -public class ClusterRerouteAction extends ActionType { - - public static final ClusterRerouteAction INSTANCE = new ClusterRerouteAction(); - public static final String NAME = "cluster:admin/reroute"; - - private ClusterRerouteAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java index 1926378dba791..2204fd08ae580 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java @@ -20,7 +20,7 @@ public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder< ClusterRerouteResponse, ClusterRerouteRequestBuilder> { public ClusterRerouteRequestBuilder(ElasticsearchClient client) { - super(client, ClusterRerouteAction.INSTANCE, new ClusterRerouteRequest()); + super(client, TransportClusterRerouteAction.TYPE, new ClusterRerouteRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index 2fc2f1cfde3b7..7eea49861333e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; @@ -51,6 +52,7 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("cluster:admin/reroute"); private static final Logger logger = LogManager.getLogger(TransportClusterRerouteAction.class); private final AllocationService allocationService; @@ -65,7 +67,7 @@ public TransportClusterRerouteAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - ClusterRerouteAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java deleted file mode 100644 index f2d5e2bcecd63..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.shards; - -import org.elasticsearch.action.ActionType; - -public class ClusterSearchShardsAction extends ActionType { - - public static final ClusterSearchShardsAction INSTANCE = new ClusterSearchShardsAction(); - public static final String NAME = "indices:admin/shards/search_shards"; - - private ClusterSearchShardsAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java index 9f11a01fc4073..daae51f04f442 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java @@ -18,7 +18,7 @@ public class ClusterSearchShardsRequestBuilder extends MasterNodeReadOperationRe ClusterSearchShardsRequestBuilder> { public ClusterSearchShardsRequestBuilder(ElasticsearchClient client) { - super(client, ClusterSearchShardsAction.INSTANCE, new ClusterSearchShardsRequest()); + super(client, TransportClusterSearchShardsAction.TYPE, new ClusterSearchShardsRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 826fa453e0402..b7164f81c71ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; @@ -38,6 +39,8 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA ClusterSearchShardsRequest, ClusterSearchShardsResponse> { + public static final ActionType TYPE = new ActionType<>("indices:admin/shards/search_shards"); + private final IndicesService indicesService; @Inject @@ -50,7 +53,7 @@ public TransportClusterSearchShardsAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - ClusterSearchShardsAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java deleted file mode 100644 index d24fbbb63246b..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.snapshots.create; - -import org.elasticsearch.action.ActionType; - -/** - * Create snapshot action - */ -public class CreateSnapshotAction extends ActionType { - - public static final CreateSnapshotAction INSTANCE = new CreateSnapshotAction(); - public static final String NAME = "cluster:admin/snapshot/create"; - - private CreateSnapshotAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java index b2cd01b4d2016..7f093b577fd57 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java @@ -27,7 +27,7 @@ public class CreateSnapshotRequestBuilder extends MasterNodeOperationRequestBuil * Constructs a new create snapshot request builder with specified repository and snapshot names */ public CreateSnapshotRequestBuilder(ElasticsearchClient client, String repository, String snapshot) { - super(client, CreateSnapshotAction.INSTANCE, new CreateSnapshotRequest(repository, snapshot)); + super(client, TransportCreateSnapshotAction.TYPE, new CreateSnapshotRequest(repository, snapshot)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 02592b722c9e0..03810f027363f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -28,6 +29,7 @@ * Transport action for create snapshot operation */ public class TransportCreateSnapshotAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("cluster:admin/snapshot/create"); private final SnapshotsService snapshotsService; @Inject @@ -40,7 +42,7 @@ public TransportCreateSnapshotAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - CreateSnapshotAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java deleted file mode 100644 index a484b5c9300a5..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.snapshots.get; - -import org.elasticsearch.action.ActionType; - -/** - * Get snapshots action - */ -public class GetSnapshotsAction extends ActionType { - - public static final GetSnapshotsAction INSTANCE = new GetSnapshotsAction(); - public static final String NAME = "cluster:admin/snapshot/get"; - - private GetSnapshotsAction() { - super(NAME); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java index 68877f6144693..f3ef2fa0bda1e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java @@ -26,7 +26,7 @@ public class GetSnapshotsRequestBuilder extends MasterNodeOperationRequestBuilde * Constructs the new get snapshot request with specified repositories */ public GetSnapshotsRequestBuilder(ElasticsearchClient client, String... repositories) { - super(client, GetSnapshotsAction.INSTANCE, new GetSnapshotsRequest(repositories)); + super(client, TransportGetSnapshotsAction.TYPE, new GetSnapshotsRequest(repositories)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 190c4c565f1b7..dd08746236fed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; @@ -72,6 +73,7 @@ */ public class TransportGetSnapshotsAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("cluster:admin/snapshot/get"); private static final Logger logger = LogManager.getLogger(TransportGetSnapshotsAction.class); private final RepositoriesService repositoriesService; @@ -86,7 +88,7 @@ public TransportGetSnapshotsAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - GetSnapshotsAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, @@ -690,7 +692,7 @@ private static class GetSnapshotInfoExecutor extends AbstractThrottledTaskRunner private final BooleanSupplier isCancelledSupplier; GetSnapshotInfoExecutor(int maxRunningTasks, BooleanSupplier isCancelledSupplier) { - super(GetSnapshotsAction.NAME, maxRunningTasks, EsExecutors.DIRECT_EXECUTOR_SERVICE, ConcurrentCollections.newBlockingQueue()); + super(TYPE.name(), maxRunningTasks, EsExecutors.DIRECT_EXECUTOR_SERVICE, ConcurrentCollections.newBlockingQueue()); this.maxRunningTasks = maxRunningTasks; this.isCancelledSupplier = isCancelledSupplier; } diff --git 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java
deleted file mode 100644
index 7cb3440e422a5..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotAction.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.cluster.snapshots.get.shard;
-
-import org.elasticsearch.action.ActionType;
-
-public class GetShardSnapshotAction extends ActionType<GetShardSnapshotResponse> {
-
-    public static final GetShardSnapshotAction INSTANCE = new GetShardSnapshotAction();
-    public static final String NAME = "internal:admin/snapshot/get_shard";
-
-    public GetShardSnapshotAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/TransportGetShardSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/TransportGetShardSnapshotAction.java
index 7b1c5f9a3e290..6933725476adf 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/TransportGetShardSnapshotAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/TransportGetShardSnapshotAction.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.action.admin.cluster.snapshots.get.shard;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
@@ -38,6 +39,8 @@
 public class TransportGetShardSnapshotAction extends TransportMasterNodeAction<GetShardSnapshotRequest, GetShardSnapshotResponse> {
 
+    public static final ActionType<GetShardSnapshotResponse> TYPE = new ActionType<>("internal:admin/snapshot/get_shard");
+
     private final IndexSnapshotsService indexSnapshotsService;
 
     @Inject
@@ -50,7 +53,7 @@ public TransportGetShardSnapshotAction(
         IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
-            GetShardSnapshotAction.NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             threadPool,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java
deleted file mode 100644
index 3e3916bab19ae..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.cluster.snapshots.restore;
-
-import org.elasticsearch.action.ActionType;
-
-/**
- * Restore snapshot action
- */
-public class RestoreSnapshotAction extends ActionType<RestoreSnapshotResponse> {
-
-    public static final RestoreSnapshotAction INSTANCE = new RestoreSnapshotAction();
-    public static final String NAME = "cluster:admin/snapshot/restore";
-
-    private RestoreSnapshotAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java
index 0dad986a86ab6..c42c82152c9df 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java
@@ -29,7 +29,7 @@ public class RestoreSnapshotRequestBuilder extends MasterNodeOperationRequestBuilder<
      * Constructs new restore snapshot request builder with specified repository and snapshot names
      */
     public RestoreSnapshotRequestBuilder(ElasticsearchClient client, String repository, String name) {
-        super(client, RestoreSnapshotAction.INSTANCE, new RestoreSnapshotRequest(repository, name));
+        super(client, TransportRestoreSnapshotAction.TYPE, new RestoreSnapshotRequest(repository, name));
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java
index b7190de319d97..cc0d4cdfd9ee9 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.action.admin.cluster.snapshots.restore;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
@@ -27,6 +28,7 @@
  * Transport action for restore snapshot operation
  */
 public class TransportRestoreSnapshotAction extends TransportMasterNodeAction<RestoreSnapshotRequest, RestoreSnapshotResponse> {
+    public static final ActionType<RestoreSnapshotResponse> TYPE = new ActionType<>("cluster:admin/snapshot/restore");
     private final RestoreService restoreService;
 
     @Inject
@@ -39,7 +41,7 @@ public TransportRestoreSnapshotAction(
         IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
-            RestoreSnapshotAction.NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             threadPool,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java
deleted file mode 100644
index 16faab60b561f..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.cluster.snapshots.status;
-
-import org.elasticsearch.action.ActionType;
-
-/**
- * Snapshots status action
- */
-public class SnapshotsStatusAction extends ActionType<SnapshotsStatusResponse> {
-
-    public static final SnapshotsStatusAction INSTANCE = new SnapshotsStatusAction();
-    public static final String NAME = "cluster:admin/snapshot/status";
-
-    private SnapshotsStatusAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java
index 26caf4307686b..dc2e68a9698ef 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java
@@ -24,14 +24,14 @@ public class SnapshotsStatusRequestBuilder extends MasterNodeOperationRequestBuilder<
      * Constructs the new snapshot status request
      */
     public SnapshotsStatusRequestBuilder(ElasticsearchClient client) {
-        super(client, SnapshotsStatusAction.INSTANCE, new SnapshotsStatusRequest());
+        super(client, TransportSnapshotsStatusAction.TYPE, new SnapshotsStatusRequest());
     }
 
     /**
      * Constructs the new snapshot status request with specified repository
      */
     public SnapshotsStatusRequestBuilder(ElasticsearchClient client, String repository) {
-        super(client, SnapshotsStatusAction.INSTANCE, new SnapshotsStatusRequest(repository));
+        super(client, TransportSnapshotsStatusAction.TYPE, new SnapshotsStatusRequest(repository));
     }
 
     /**
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
index 9215d97490629..202940dfe7f69 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
@@ -49,7 +49,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<
     TransportNodesSnapshotsStatus.NodeRequest,
     TransportNodesSnapshotsStatus.NodeSnapshotStatus> {
 
-    public static final String ACTION_NAME = SnapshotsStatusAction.NAME + "[nodes]";
+    public static final String ACTION_NAME = TransportSnapshotsStatusAction.TYPE.name() + "[nodes]";
     public static final ActionType<NodesSnapshotStatus> TYPE = new ActionType<>(ACTION_NAME);
 
     private final SnapshotShardsService snapshotShardsService;
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
index a82a64ca2f683..28f970eb8c9fe 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.RefCountAwareThreadedActionListener;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -65,6 +66,7 @@
 public class TransportSnapshotsStatusAction extends TransportMasterNodeAction<SnapshotsStatusRequest, SnapshotsStatusResponse> {
 
+    public static final ActionType<SnapshotsStatusResponse> TYPE = new ActionType<>("cluster:admin/snapshot/status");
     private static final Logger logger = LogManager.getLogger(TransportSnapshotsStatusAction.class);
 
     private final RepositoriesService repositoriesService;
@@ -82,7 +84,7 @@ public TransportSnapshotsStatusAction(
         IndexNameExpressionResolver indexNameExpressionResolver
     ) {
         super(
-            SnapshotsStatusAction.NAME,
+            TYPE.name(),
             transportService,
             clusterService,
             threadPool,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java
deleted file mode 100644
index f2e3547d08cda..0000000000000
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.action.admin.cluster.stats;
-
-import org.elasticsearch.action.ActionType;
-
-public class ClusterStatsAction extends ActionType<ClusterStatsResponse> {
-
-    public static final ClusterStatsAction INSTANCE = new ClusterStatsAction();
-    public static final String NAME = "cluster:monitor/stats";
-
-    private ClusterStatsAction() {
-        super(NAME);
-    }
-}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java
index e82132b59e3fe..d6a28eaadd6c7 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java
@@ -17,6 +17,6 @@ public class ClusterStatsRequestBuilder extends NodesOperationRequestBuilder<
     ClusterStatsRequestBuilder> {
 
     public ClusterStatsRequestBuilder(ElasticsearchClient client) {
-        super(client, ClusterStatsAction.INSTANCE, new ClusterStatsRequest());
+        super(client, TransportClusterStatsAction.TYPE, new ClusterStatsRequest());
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index d8fff551c0551..f1b6faaca439a 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -10,6 +10,7 @@
 
 import org.apache.lucene.store.AlreadyClosedException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -61,6 +62,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<
     TransportClusterStatsAction.ClusterStatsNodeRequest,
     ClusterStatsNodeResponse> {
 
+    public static final ActionType<ClusterStatsResponse> TYPE = new ActionType<>("cluster:monitor/stats");
     private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(
         CommonStatsFlags.Flag.Docs,
         CommonStatsFlags.Flag.Store,
@@ -89,7 +91,7 @@ public TransportClusterStatsAction(
         ActionFilters actionFilters
     ) {
         super(
-            ClusterStatsAction.NAME,
+            TYPE.name(),
             clusterService,
             transportService,
             actionFilters,
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
index 3abb74ddd8ec5..51a8c6ddb3d76 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java
@@ -20,9 +20,9 @@
 import org.elasticsearch.action.RemoteClusterActionType;
 import org.elasticsearch.action.ResolvedIndices;
 import org.elasticsearch.action.ShardOperationFailedException;
-import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
+import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -759,7 +759,7 @@ Map createFinalResponse() {
             ).local(true).preference(preference).routing(routing);
             transportService.sendRequest(
                 connection,
-                ClusterSearchShardsAction.NAME,
+                TransportClusterSearchShardsAction.TYPE.name(),
                 searchShardsRequest,
                 TransportRequestOptions.EMPTY,
                 new ActionListenerResponseHandler<>(
diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
index 26a8768a78e78..8a26a8b54c532 100644
--- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
+++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java
@@ -64,48 +64,48 @@
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequestBuilder;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
-import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
+import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
-import
org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequestBuilder; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.clone.CloneSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.clone.TransportCloneSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; +import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import 
org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequestBuilder; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; @@ -690,12 +690,12 @@ public ClusterStateRequestBuilder prepareState() { @Override public ActionFuture reroute(final ClusterRerouteRequest request) { - return execute(ClusterRerouteAction.INSTANCE, request); + return execute(TransportClusterRerouteAction.TYPE, request); } @Override public void reroute(final ClusterRerouteRequest request, final ActionListener listener) { - execute(ClusterRerouteAction.INSTANCE, request, listener); + execute(TransportClusterRerouteAction.TYPE, request, listener); } @Override @@ -758,7 +758,7 @@ public void nodesUsage(final NodesUsageRequest request, final ActionListener listener) { - execute(ClusterStatsAction.INSTANCE, request, listener); + execute(TransportClusterStatsAction.TYPE, request, listener); } @Override @@ -818,7 +818,7 @@ public CancelTasksRequestBuilder prepareCancelTasks(String... nodesIds) { @Override public void searchShards(final ClusterSearchShardsRequest request, final ActionListener listener) { - execute(ClusterSearchShardsAction.INSTANCE, request, listener); + execute(TransportClusterSearchShardsAction.TYPE, request, listener); } @Override @@ -838,12 +838,12 @@ public PutRepositoryRequestBuilder preparePutRepository(String name) { @Override public ActionFuture createSnapshot(CreateSnapshotRequest request) { - return execute(CreateSnapshotAction.INSTANCE, request); + return execute(TransportCreateSnapshotAction.TYPE, request); } @Override public void createSnapshot(CreateSnapshotRequest request, ActionListener listener) { - execute(CreateSnapshotAction.INSTANCE, request, listener); + execute(TransportCreateSnapshotAction.TYPE, request, listener); } @Override @@ -863,7 +863,7 @@ public void cloneSnapshot(CloneSnapshotRequest request, ActionListener listener) { - execute(GetSnapshotsAction.INSTANCE, request, listener); + execute(TransportGetSnapshotsAction.TYPE, request, listener); } @Override @@ -923,12 +923,12 @@ public void cleanupRepository(CleanupRepositoryRequest request, ActionListener restoreSnapshot(RestoreSnapshotRequest request) { - return execute(RestoreSnapshotAction.INSTANCE, request); + return execute(TransportRestoreSnapshotAction.TYPE, request); } @Override public void restoreSnapshot(RestoreSnapshotRequest request, ActionListener listener) { - execute(RestoreSnapshotAction.INSTANCE, request, listener); + execute(TransportRestoreSnapshotAction.TYPE, request, listener); } @Override @@ -938,7 +938,7 @@ public RestoreSnapshotRequestBuilder prepareRestoreSnapshot(String repository, S @Override public void snapshotsStatus(SnapshotsStatusRequest request, ActionListener listener) { - execute(SnapshotsStatusAction.INSTANCE, request, listener); + execute(TransportSnapshotsStatusAction.TYPE, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java b/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java index e15ec4c339a94..6e53b8416ebd3 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsService.java @@ -19,9 +19,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotAction; 
import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.shard.GetShardSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.get.shard.TransportGetShardSnapshotAction; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -93,7 +93,7 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener( threadPool.generic(), diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java index 0a490898b7fa7..dc0bd57731d98 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java @@ -11,9 +11,9 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; @@ -55,9 +55,9 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { TransportIndexAction.TYPE, // cluster admin actions - ClusterStatsAction.INSTANCE, - CreateSnapshotAction.INSTANCE, - ClusterRerouteAction.INSTANCE, + TransportClusterStatsAction.TYPE, + TransportCreateSnapshotAction.TYPE, + TransportClusterRerouteAction.TYPE, // indices admin actions TransportCreateIndexAction.TYPE, @@ -110,12 +110,18 @@ public void testActions() { .execute(new AssertingActionListener<>(TransportIndexAction.NAME, client.threadPool())); // choosing arbitrary cluster admin actions to test - client.admin().cluster().prepareClusterStats().execute(new AssertingActionListener<>(ClusterStatsAction.NAME, client.threadPool())); + client.admin() + .cluster() + .prepareClusterStats() + .execute(new AssertingActionListener<>(TransportClusterStatsAction.TYPE.name(), client.threadPool())); client.admin() .cluster() .prepareCreateSnapshot("repo", "bck") - .execute(new AssertingActionListener<>(CreateSnapshotAction.NAME, client.threadPool())); - client.admin().cluster().prepareReroute().execute(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool())); + .execute(new AssertingActionListener<>(TransportCreateSnapshotAction.TYPE.name(), client.threadPool())); + client.admin() + .cluster() + .prepareReroute() + .execute(new AssertingActionListener<>(TransportClusterRerouteAction.TYPE.name(), client.threadPool())); // choosing arbitrary indices admin actions to test client.admin() @@ -142,7 +148,7 @@ public void testOverrideHeader() throws Exception { client.admin() .cluster() 
.prepareClusterStats() - .execute(new AssertingActionListener<>(ClusterStatsAction.NAME, expected, client.threadPool())); + .execute(new AssertingActionListener<>(TransportClusterStatsAction.TYPE.name(), expected, client.threadPool())); client.admin() .indices() diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 303493a2e5d2d..e5e7e19de0fa4 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -21,16 +21,13 @@ import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.clone.TransportCloneSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; @@ -2428,7 +2425,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { ) ); actions.put( - RestoreSnapshotAction.INSTANCE, + TransportRestoreSnapshotAction.TYPE, new TransportRestoreSnapshotAction( transportService, clusterService, @@ -2473,7 +2470,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { ) ); actions.put( - CreateSnapshotAction.INSTANCE, + TransportCreateSnapshotAction.TYPE, new TransportCreateSnapshotAction( transportService, clusterService, @@ -2495,7 +2492,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { ) ); actions.put( - ClusterRerouteAction.INSTANCE, + TransportClusterRerouteAction.TYPE, new TransportClusterRerouteAction( transportService, clusterService, diff --git a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java index 5d32300585279..e636e3c2d7d9c 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java @@ -10,8 +10,8 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; 
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; import org.elasticsearch.common.logging.Loggers; @@ -90,7 +90,7 @@ private BytesReference buildRequest() throws IOException { new ThreadContext(Settings.EMPTY), new ClusterStatsRequest(), TransportVersion.current(), - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), randomInt(30), false, compress diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 3774efcdd2ad2..372b62cffeaea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -12,9 +12,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; +import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.indices.template.get.GetComponentTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetComposableIndexTemplateAction; @@ -144,16 +144,16 @@ public class ClusterPrivilegeResolver { HasPrivilegesAction.NAME ); private static final Set CREATE_SNAPSHOT_PATTERN = Set.of( - CreateSnapshotAction.NAME, - SnapshotsStatusAction.NAME + "*", - GetSnapshotsAction.NAME, - SnapshotsStatusAction.NAME, + TransportCreateSnapshotAction.TYPE.name(), + TransportSnapshotsStatusAction.TYPE.name() + "*", + TransportGetSnapshotsAction.TYPE.name(), + TransportSnapshotsStatusAction.TYPE.name(), GetRepositoriesAction.NAME ); private static final Set MONITOR_SNAPSHOT_PATTERN = Set.of( - SnapshotsStatusAction.NAME + "*", - GetSnapshotsAction.NAME, - SnapshotsStatusAction.NAME, + TransportSnapshotsStatusAction.TYPE.name() + "*", + TransportGetSnapshotsAction.TYPE.name(), + TransportSnapshotsStatusAction.TYPE.name(), GetRepositoriesAction.NAME ); private static final Set READ_CCR_PATTERN = Set.of(ClusterStateAction.NAME, HasPrivilegesAction.NAME); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 674706eb9af49..8443dc4083694 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; 
import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; @@ -83,7 +83,7 @@ public final class IndexPrivilege extends Privilege { ); private static final Automaton READ_CROSS_CLUSTER_AUTOMATON = patterns( "internal:transport/proxy/indices:data/read/*", - ClusterSearchShardsAction.NAME, + TransportClusterSearchShardsAction.TYPE.name(), TransportSearchShardsAction.TYPE.name(), TransportResolveClusterAction.NAME, "indices:data/read/esql", @@ -127,7 +127,7 @@ public final class IndexPrivilege extends Privilege { GetIndexAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME, - ClusterSearchShardsAction.NAME, + TransportClusterSearchShardsAction.TYPE.name(), TransportSearchShardsAction.TYPE.name(), ValidateQueryAction.NAME + "*", GetSettingsAction.NAME, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CreateSnapshotStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CreateSnapshotStepTests.java index b954162aee6f2..9d74227afe9fe 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CreateSnapshotStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CreateSnapshotStepTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -231,7 +231,7 @@ protected void Request request, ActionListener listener ) { - assertThat(action.name(), is(CreateSnapshotAction.NAME)); + assertThat(action.name(), is(TransportCreateSnapshotAction.TYPE.name())); assertTrue(request instanceof CreateSnapshotRequest); CreateSnapshotRequest createSnapshotRequest = (CreateSnapshotRequest) request; assertThat(createSnapshotRequest.indices().length, is(1)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index aa9bb1dd579bf..d15fb9a1409dd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -9,10 +9,10 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; +import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import 
org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.common.util.set.Sets; @@ -305,12 +305,12 @@ public void testReadSecurityPrivilege() { InvalidateApiKeyAction.NAME, TransportClusterHealthAction.NAME, ClusterStateAction.NAME, - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), NodeEnrollmentAction.NAME, KibanaEnrollmentAction.NAME, TransportPutIndexTemplateAction.TYPE.name(), GetIndexTemplatesAction.NAME, - ClusterRerouteAction.NAME, + TransportClusterRerouteAction.TYPE.name(), ClusterUpdateSettingsAction.NAME, ClearRealmCacheAction.NAME, ClearSecurityCacheAction.NAME, @@ -352,10 +352,10 @@ public void testManageUserProfilePrivilege() { ClusterPrivilegeResolver.MANAGE_USER_PROFILE, TransportClusterHealthAction.NAME, ClusterStateAction.NAME, - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), TransportPutIndexTemplateAction.TYPE.name(), GetIndexTemplatesAction.NAME, - ClusterRerouteAction.NAME, + TransportClusterRerouteAction.TYPE.name(), ClusterUpdateSettingsAction.NAME ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 39a94e4a2f0bf..584f0a2d95fca 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -11,14 +11,14 @@ import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; -import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; +import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; +import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import 
org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; @@ -303,9 +303,9 @@ public void testSnapshotUserRole() { FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); Role snapshotUserRole = Role.buildFromRoleDescriptor(roleDescriptor, fieldPermissionsCache, RESTRICTED_INDICES); assertThat(snapshotUserRole.cluster().check(GetRepositoriesAction.NAME, request, authentication), is(true)); - assertThat(snapshotUserRole.cluster().check(CreateSnapshotAction.NAME, request, authentication), is(true)); - assertThat(snapshotUserRole.cluster().check(SnapshotsStatusAction.NAME, request, authentication), is(true)); - assertThat(snapshotUserRole.cluster().check(GetSnapshotsAction.NAME, request, authentication), is(true)); + assertThat(snapshotUserRole.cluster().check(TransportCreateSnapshotAction.TYPE.name(), request, authentication), is(true)); + assertThat(snapshotUserRole.cluster().check(TransportSnapshotsStatusAction.TYPE.name(), request, authentication), is(true)); + assertThat(snapshotUserRole.cluster().check(TransportGetSnapshotsAction.TYPE.name(), request, authentication), is(true)); assertThat(snapshotUserRole.cluster().check(TransportPutRepositoryAction.TYPE.name(), request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(false)); @@ -313,7 +313,7 @@ public void testSnapshotUserRole() { assertThat(snapshotUserRole.cluster().check(PutPipelineTransportAction.TYPE.name(), request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(GetPipelineAction.NAME, request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(DeletePipelineTransportAction.TYPE.name(), request, authentication), is(false)); - assertThat(snapshotUserRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(snapshotUserRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(snapshotUserRole.cluster().check(GetWatchAction.NAME, request, authentication), is(false)); @@ -386,7 +386,7 @@ public void testIngestAdminRole() { assertThat(ingestAdminRole.cluster().check(PutPipelineTransportAction.TYPE.name(), request, authentication), is(true)); assertThat(ingestAdminRole.cluster().check(GetPipelineAction.NAME, request, authentication), is(true)); assertThat(ingestAdminRole.cluster().check(DeletePipelineTransportAction.TYPE.name(), request, authentication), is(true)); - assertThat(ingestAdminRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(ingestAdminRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(ingestAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(ingestAdminRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(ingestAdminRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -426,10 +426,10 @@ public void testKibanaSystemRole() { Role kibanaRole = Role.buildFromRoleDescriptor(roleDescriptor, new 
FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); assertThat(kibanaRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(kibanaRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(kibanaRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat(kibanaRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(true)); assertThat(kibanaRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(true)); assertThat(kibanaRole.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); - assertThat(kibanaRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(kibanaRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); @@ -1704,9 +1704,9 @@ public void testKibanaAdminRole() { ); assertThat(kibanaAdminRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(kibanaAdminRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(kibanaAdminRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(kibanaAdminRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(kibanaAdminRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(kibanaAdminRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(kibanaAdminRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(kibanaAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(kibanaAdminRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(kibanaAdminRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -1768,9 +1768,9 @@ public void testKibanaUserRole() { ); assertThat(kibanaUserRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(kibanaUserRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(kibanaUserRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(kibanaUserRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(kibanaUserRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(kibanaUserRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(kibanaUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(kibanaUserRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(kibanaUserRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, 
authentication), is(false)); @@ -1851,9 +1851,9 @@ public void testMonitoringUserRole() { assertThat(monitoringUserRole.cluster().check(TransportRemoteInfoAction.TYPE.name(), request, authentication), is(true)); assertThat(monitoringUserRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(monitoringUserRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(monitoringUserRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(monitoringUserRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(monitoringUserRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(monitoringUserRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(monitoringUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(monitoringUserRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(monitoringUserRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -1990,12 +1990,15 @@ public void testRemoteMonitoringAgentRole() { ); assertThat(remoteMonitoringAgentRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(true)); assertThat( remoteMonitoringAgentRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(true) ); - assertThat(remoteMonitoringAgentRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat( + remoteMonitoringAgentRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), + is(false) + ); assertThat(remoteMonitoringAgentRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(remoteMonitoringAgentRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(remoteMonitoringAgentRole.cluster().check(GetWatchAction.NAME, request, authentication), is(true)); @@ -2200,7 +2203,10 @@ public void testRemoteMonitoringCollectorRole() { ); assertThat(remoteMonitoringCollectorRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(remoteMonitoringCollectorRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(remoteMonitoringCollectorRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat( + remoteMonitoringCollectorRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), + is(true) + ); assertThat(remoteMonitoringCollectorRole.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); assertThat( remoteMonitoringCollectorRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, 
authentication), @@ -2210,7 +2216,10 @@ public void testRemoteMonitoringCollectorRole() { remoteMonitoringCollectorRole.cluster().check(TransportDeleteIndexTemplateAction.TYPE.name(), request, authentication), is(false) ); - assertThat(remoteMonitoringCollectorRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat( + remoteMonitoringCollectorRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), + is(false) + ); assertThat(remoteMonitoringCollectorRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(remoteMonitoringCollectorRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(remoteMonitoringCollectorRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -2494,9 +2503,9 @@ public void testReportingUserRole() { ); assertThat(reportingUserRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(reportingUserRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(reportingUserRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(reportingUserRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(reportingUserRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -2708,9 +2717,9 @@ public void testLogstashSystemRole() { ); assertThat(logstashSystemRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(logstashSystemRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(logstashSystemRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat(logstashSystemRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(true)); assertThat(logstashSystemRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(logstashSystemRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(logstashSystemRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(logstashSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(logstashSystemRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); assertThat(logstashSystemRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); @@ -2749,9 +2758,9 @@ public void testBeatsAdminRole() { ); assertThat(beatsAdminRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); 
assertThat(beatsAdminRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(beatsAdminRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(beatsAdminRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(beatsAdminRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(beatsAdminRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(beatsAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(beatsAdminRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(beatsAdminRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -2814,9 +2823,9 @@ public void testBeatsSystemRole() { Role beatsSystemRole = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); assertThat(beatsSystemRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(beatsSystemRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(beatsSystemRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat(beatsSystemRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(true)); assertThat(beatsSystemRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(beatsSystemRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(beatsSystemRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(beatsSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(beatsSystemRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); assertThat(beatsSystemRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); @@ -2865,9 +2874,9 @@ public void testAPMSystemRole() { Role APMSystemRole = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); assertThat(APMSystemRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(true)); assertThat(APMSystemRole.cluster().check(ClusterStateAction.NAME, request, authentication), is(true)); - assertThat(APMSystemRole.cluster().check(ClusterStatsAction.NAME, request, authentication), is(true)); + assertThat(APMSystemRole.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(true)); assertThat(APMSystemRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(APMSystemRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(APMSystemRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(APMSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(APMSystemRole.cluster().check(DelegatePkiAuthenticationAction.NAME, 
request, authentication), is(false)); assertThat(APMSystemRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); @@ -3527,9 +3536,9 @@ public void testPredefinedViewerRole() { // No cluster privileges assertThat(role.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(role.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(role.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -3601,9 +3610,9 @@ public void testPredefinedEditorRole() { // No cluster privileges assertThat(role.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(ClusterStateAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(ClusterStatsAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportClusterStatsAction.TYPE.name(), request, authentication), is(false)); assertThat(role.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(role.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(role.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); @@ -3797,7 +3806,7 @@ private void assertViewIndexMetadata(Role role, String index) { GetIndexAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME, - ClusterSearchShardsAction.NAME, + TransportClusterSearchShardsAction.TYPE.name(), TransportSearchShardsAction.TYPE.name(), ValidateQueryAction.NAME + "*", GetSettingsAction.NAME, @@ -3851,7 +3860,7 @@ public void testLogstashAdminRole() { ); assertThat(logstashAdminRole.cluster().check(TransportClusterHealthAction.NAME, request, authentication), is(false)); assertThat(logstashAdminRole.cluster().check(TransportPutIndexTemplateAction.TYPE.name(), request, authentication), is(false)); - assertThat(logstashAdminRole.cluster().check(ClusterRerouteAction.NAME, request, authentication), is(false)); + assertThat(logstashAdminRole.cluster().check(TransportClusterRerouteAction.TYPE.name(), request, authentication), is(false)); assertThat(logstashAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(logstashAdminRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(false)); diff 
--git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java index fe39822d869d3..fe0c905b35dad 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/LifecycleOperationSnapshotTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.ilm; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; +import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -99,7 +99,7 @@ public void testModeSnapshotRestore() throws Exception { logger.info("--> checking for snapshot success"); try { GetSnapshotsResponse getResp = client().execute( - GetSnapshotsAction.INSTANCE, + TransportGetSnapshotsAction.TYPE, new GetSnapshotsRequest(new String[] { "repo" }, new String[] { snapshotName }) ).get(); assertThat(getResp.getSnapshots().size(), equalTo(1)); @@ -116,7 +116,7 @@ public void testModeSnapshotRestore() throws Exception { // Restore snapshot client().execute( - RestoreSnapshotAction.INSTANCE, + TransportRestoreSnapshotAction.TYPE, new RestoreSnapshotRequest("repo", snapshotName).includeGlobalState(true).indices(Strings.EMPTY_ARRAY).waitForCompletion(true) ).get(); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsUuidValidationIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsUuidValidationIntegTests.java index 49400b3574015..42ff8cb2f053d 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsUuidValidationIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsUuidValidationIntegTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; @@ -57,7 +57,7 @@ public static class RestoreBlockingActionFilter extends org.elasticsearch.action @Override protected boolean apply(String action, ActionRequest request, ActionListener listener) { - if (RestoreSnapshotAction.NAME.equals(action)) { + if 
(TransportRestoreSnapshotAction.TYPE.name().equals(action)) { executed.onResponse(null); unblocked.actionGet(); } diff --git a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java index bb76056e065f3..d9e870b031877 100644 --- a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java +++ b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; +import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.search.TransportSearchShardsAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -61,7 +61,7 @@ public class CrossClusterShardTests extends ESSingleNodeTestCase { DownsampleShardPersistentTaskExecutor.DelegatingAction.NAME, // These actions do not have any references to shard IDs in their requests. - ClusterSearchShardsAction.NAME + TransportClusterSearchShardsAction.TYPE.name() ); Set> CHECKED_ABSTRACT_CLASSES = Set.of( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java index 86845a2c07a26..3336fefa4447b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.core.Strings; @@ -94,14 +94,17 @@ public void testParseValidJsonWithJustIndexPrivileges() throws Exception { public void testParseValidJsonWithJustClusterPrivileges() throws Exception { String json = Strings.format(""" - { "cluster": [ "manage","%s","%s"] }""", TransportClusterHealthAction.NAME, ClusterStatsAction.NAME); + { "cluster": [ "manage","%s","%s"] }""", TransportClusterHealthAction.NAME, TransportClusterStatsAction.TYPE.name()); final HasPrivilegesRequestBuilder builder = new HasPrivilegesRequestBuilder(mock(Client.class)); builder.source("elastic", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON); final HasPrivilegesRequest request = builder.request(); assertThat(request.indexPrivileges().length, equalTo(0)); - assertThat(request.clusterPrivileges(), arrayContaining("manage", TransportClusterHealthAction.NAME, ClusterStatsAction.NAME)); + assertThat( + request.clusterPrivileges(), + arrayContaining("manage", TransportClusterHealthAction.NAME, TransportClusterStatsAction.TYPE.name()) + ); } public void testUseOfFieldLevelSecurityThrowsException() throws Exception { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index fa699d8e6e72e..ab5450f3ab4dd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -314,7 +314,7 @@ public void testSameUserPermissionDoesNotAllowOtherActions() { DeleteUserAction.NAME, TransportClusterHealthAction.NAME, ClusterStateAction.NAME, - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), GetLicenseAction.NAME ); final Authentication authentication = AuthenticationTestHelper.builder().build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 39eda23e35eec..5816c3e24d560 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; @@ -2719,7 +2719,7 @@ public void testAsyncSearchUserHasNoClusterPrivileges() { for (String action : Arrays.asList( ClusterStateAction.NAME, GetWatchAction.NAME, - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), TransportNodesStatsAction.TYPE.name() )) { assertThat( @@ -2733,7 +2733,7 @@ public void testXpackUserHasClusterPrivileges() { for (String action : Arrays.asList( ClusterStateAction.NAME, GetWatchAction.NAME, - ClusterStatsAction.NAME, + TransportClusterStatsAction.TYPE.name(), TransportNodesStatsAction.TYPE.name() )) { assertThat( diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index 89cc45fb6e5a5..d1e6c56ae1517 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ 
b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.cluster.SnapshotsInProgress; @@ -514,7 +514,7 @@ public void testSLMRetentionAfterRestore() throws Exception { restoreReq.renamePattern("(.+)"); restoreReq.renameReplacement("restored_$1"); restoreReq.waitForCompletion(true); - RestoreSnapshotResponse resp = client().execute(RestoreSnapshotAction.INSTANCE, restoreReq).get(); + RestoreSnapshotResponse resp = client().execute(TransportRestoreSnapshotAction.TYPE, restoreReq).get(); assertThat(resp.status(), equalTo(RestStatus.OK)); logger.info("--> executing SLM retention"); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java index f54cd4d4977d7..2c698a0383add 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java @@ -13,9 +13,9 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -61,6 +61,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.Matchers.startsWith; public class SnapshotLifecycleTaskTests extends ESTestCase { @@ -178,7 +179,7 @@ public void testCreateSnapshotOnTrigger() { // request. 
It also returns a mock real response VerifyingClient client = new VerifyingClient(threadPool, (action, request, listener) -> { assertFalse(clientCalled.getAndSet(true)); - assertThat(action, instanceOf(CreateSnapshotAction.class)); + assertThat(action, sameInstance(TransportCreateSnapshotAction.TYPE)); assertThat(request, instanceOf(CreateSnapshotRequest.class)); CreateSnapshotRequest req = (CreateSnapshotRequest) request; @@ -249,7 +250,7 @@ public void testPartialFailureSnapshot() throws Exception { try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool, settings)) { VerifyingClient client = new VerifyingClient(threadPool, (action, request, listener) -> { assertFalse(clientCalled.getAndSet(true)); - assertThat(action, instanceOf(CreateSnapshotAction.class)); + assertThat(action, sameInstance(TransportCreateSnapshotAction.TYPE)); assertThat(request, instanceOf(CreateSnapshotRequest.class)); CreateSnapshotRequest req = (CreateSnapshotRequest) request; From dcb248a01381e2551dca2f8c65819b453e4674bc Mon Sep 17 00:00:00 2001 From: Howard Date: Sun, 21 Apr 2024 22:31:11 +0800 Subject: [PATCH 122/130] Add node stats effective watermark thresholds docs (#107668) Relates https://github.com/elastic/elasticsearch/pull/107244 --- docs/reference/cluster/nodes-stats.asciidoc | 62 +++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index a40d1f98cbd51..d0e4188ce74ed 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -1853,6 +1853,68 @@ store. (integer) Total number of bytes available to this Java virtual machine on this file store. + +`low_watermark_free_space`:: +(<<byte-units,byte value>>) +The effective low disk watermark for this data path on this node: when a node +has less free space than this value for at least one data path, its disk usage +has exceeded the low watermark. See <<disk-based-shard-allocation>> for more +information about disk watermarks and their effects on shard allocation. + +`low_watermark_free_space_in_bytes`:: +(integer) +The effective low disk watermark, in bytes, for this data path on this node: +when a node has less free space than this value for at least one data path, its +disk usage has exceeded the low watermark. See <<disk-based-shard-allocation>> +for more information about disk watermarks and their effects on shard +allocation. + +`high_watermark_free_space`:: +(<<byte-units,byte value>>) +The effective high disk watermark for this data path on this node: when a node +has less free space than this value for at least one data path, its disk usage +has exceeded the high watermark. See <<disk-based-shard-allocation>> for more +information about disk watermarks and their effects on shard allocation. + +`high_watermark_free_space_in_bytes`:: +(integer) +The effective high disk watermark, in bytes, for this data path on this node: +when a node has less free space than this value for at least one data path, its +disk usage has exceeded the high watermark. See <<disk-based-shard-allocation>> +for more information about disk watermarks and their effects on shard +allocation. + +`flood_stage_free_space`:: +(<<byte-units,byte value>>) +The effective flood stage disk watermark for this data path on this node: when +a node has less free space than this value for at least one data path, its disk +usage has exceeded the flood stage watermark. See +<<disk-based-shard-allocation>> for more information about disk watermarks and +their effects on shard allocation. 
+ +`flood_stage_free_space_in_bytes`:: +(integer) +The effective flood stage disk watermark, in bytes, for this data path on this +node: when a node has less free space than this value for at least one data +path, its disk usage has exceeded the flood stage watermark. See +<<disk-based-shard-allocation>> for more information about disk watermarks and +their effects on shard allocation. + +`frozen_flood_stage_free_space`:: +(<<byte-units,byte value>>) +The effective flood stage disk watermark for this data path on a dedicated +frozen node: when a dedicated frozen node has less free space than this value +for at least one data path, its disk usage has exceeded the flood stage +watermark. See <<disk-based-shard-allocation>> for more information about disk +watermarks and their effects on shard allocation. + +`frozen_flood_stage_free_space_in_bytes`:: +(integer) +The effective flood stage disk watermark, in bytes, for this data path on a +dedicated frozen node: when a dedicated frozen node has less free space than +this value for at least one data path, its disk usage has exceeded the flood +stage watermark. See <<disk-based-shard-allocation>> for more information about +disk watermarks and their effects on shard allocation. ======= `io_stats` (Linux only):: From d9211fda92c03ffa2d614de561bbc7e260034692 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 22 Apr 2024 07:48:09 +0200 Subject: [PATCH 123/130] Optimize GeoBounds and GeoCentroid aggregations for single value fields (#107663) --- docs/changelog/107663.yaml | 5 + .../metrics/GeoBoundsAggregator.java | 100 +++++++++--------- .../metrics/GeoCentroidAggregator.java | 65 +++++++++--- 3 files changed, 105 insertions(+), 65 deletions(-) create mode 100644 docs/changelog/107663.yaml diff --git a/docs/changelog/107663.yaml b/docs/changelog/107663.yaml new file mode 100644 index 0000000000000..a7c3dc185425a --- /dev/null +++ b/docs/changelog/107663.yaml @@ -0,0 +1,5 @@ +pr: 107663 +summary: Optimize `GeoBounds` and `GeoCentroid` aggregations for single value fields +area: Geo +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java index 84be4f78901ff..fd967182145a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java @@ -11,6 +11,8 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.fielddata.FieldData; +import org.elasticsearch.index.fielddata.GeoPointValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -67,66 +69,66 @@ final class GeoBoundsAggregator extends MetricsAggregator { @Override public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) { final MultiGeoPointValues values = valuesSource.geoPointValues(aggCtx.getLeafReaderContext()); + final GeoPointValues singleton = FieldData.unwrapSingleton(values); + return singleton != null ? 
getLeafCollector(singleton, sub) : getLeafCollector(values, sub); + } + + private LeafBucketCollector getLeafCollector(MultiGeoPointValues values, LeafBucketCollector sub) { return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { - if (bucket >= tops.size()) { - long from = tops.size(); - tops = bigArrays().grow(tops, bucket + 1); - tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY); - bottoms = bigArrays().resize(bottoms, tops.size()); - bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY); - posLefts = bigArrays().resize(posLefts, tops.size()); - posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY); - posRights = bigArrays().resize(posRights, tops.size()); - posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY); - negLefts = bigArrays().resize(negLefts, tops.size()); - negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY); - negRights = bigArrays().resize(negRights, tops.size()); - negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY); + growBucket(bucket); + if (values.advanceExact(doc)) { + for (int i = 0; i < values.docValueCount(); ++i) { + addPoint(values.nextValue(), bucket); + } } + } + }; + } + private LeafBucketCollector getLeafCollector(GeoPointValues values, LeafBucketCollector sub) { + return new LeafBucketCollectorBase(sub, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + growBucket(bucket); if (values.advanceExact(doc)) { - final int valuesCount = values.docValueCount(); - - for (int i = 0; i < valuesCount; ++i) { - GeoPoint value = values.nextValue(); - double top = tops.get(bucket); - if (value.lat() > top) { - top = value.lat(); - } - double bottom = bottoms.get(bucket); - if (value.lat() < bottom) { - bottom = value.lat(); - } - double posLeft = posLefts.get(bucket); - if (value.lon() >= 0 && value.lon() < posLeft) { - posLeft = value.lon(); - } - double posRight = posRights.get(bucket); - if (value.lon() >= 0 && value.lon() > posRight) { - posRight = value.lon(); - } - double negLeft = negLefts.get(bucket); - if (value.lon() < 0 && value.lon() < negLeft) { - negLeft = value.lon(); - } - double negRight = negRights.get(bucket); - if (value.lon() < 0 && value.lon() > negRight) { - negRight = value.lon(); - } - tops.set(bucket, top); - bottoms.set(bucket, bottom); - posLefts.set(bucket, posLeft); - posRights.set(bucket, posRight); - negLefts.set(bucket, negLeft); - negRights.set(bucket, negRight); - } + addPoint(values.pointValue(), bucket); } } }; } + private void growBucket(long bucket) { + if (bucket >= tops.size()) { + long from = tops.size(); + tops = bigArrays().grow(tops, bucket + 1); + tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY); + bottoms = bigArrays().resize(bottoms, tops.size()); + bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY); + posLefts = bigArrays().resize(posLefts, tops.size()); + posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY); + posRights = bigArrays().resize(posRights, tops.size()); + posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY); + negLefts = bigArrays().resize(negLefts, tops.size()); + negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY); + negRights = bigArrays().resize(negRights, tops.size()); + negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY); + } + } + + private void addPoint(GeoPoint value, long bucket) { + tops.set(bucket, Math.max(tops.get(bucket), value.lat())); + bottoms.set(bucket, 
Math.min(bottoms.get(bucket), value.lat())); + if (value.lon() >= 0) { + posLefts.set(bucket, Math.min(posLefts.get(bucket), value.lon())); + posRights.set(bucket, Math.max(posRights.get(bucket), value.lon())); + } else { + negLefts.set(bucket, Math.min(negLefts.get(bucket), value.lon())); + negRights.set(bucket, Math.max(negRights.get(bucket), value.lon())); + } + } + @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) { if (owningBucketOrdinal >= tops.size()) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java index 27bd6b076bf4c..f766295f12eef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.fielddata.FieldData; +import org.elasticsearch.index.fielddata.GeoPointValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -53,35 +55,28 @@ final class GeoCentroidAggregator extends MetricsAggregator { @Override public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) throws IOException { final MultiGeoPointValues values = valuesSource.geoPointValues(aggCtx.getLeafReaderContext()); + final GeoPointValues singleton = FieldData.unwrapSingleton(values); + return singleton != null ? getLeafCollector(singleton, sub) : getLeafCollector(values, sub); + } + + private LeafBucketCollector getLeafCollector(MultiGeoPointValues values, LeafBucketCollector sub) { final CompensatedSum compensatedSumLat = new CompensatedSum(0, 0); final CompensatedSum compensatedSumLon = new CompensatedSum(0, 0); - return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { - latSum = bigArrays().grow(latSum, bucket + 1); - lonSum = bigArrays().grow(lonSum, bucket + 1); - lonCompensations = bigArrays().grow(lonCompensations, bucket + 1); - latCompensations = bigArrays().grow(latCompensations, bucket + 1); - counts = bigArrays().grow(counts, bucket + 1); - + growBucket(bucket); if (values.advanceExact(doc)) { final int valueCount = values.docValueCount(); // increment by the number of points for this document counts.increment(bucket, valueCount); // Compute the sum of double values with Kahan summation algorithm which is more // accurate than naive summation. 
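// (A sketch of the invariant, assuming CompensatedSum follows the classic Kahan scheme:
// each bucket keeps a running sum plus a compensation double holding the low-order bits
// lost when a small lat/lon is added to a large sum (for example, naively adding 1.0 to
// 1e16 loses the 1.0 entirely); reset() reloads the per-bucket sum/compensation pair
// below, and delta() is written back once the document's values have been accumulated.)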
- double sumLat = latSum.get(bucket); - double compensationLat = latCompensations.get(bucket); - double sumLon = lonSum.get(bucket); - double compensationLon = lonCompensations.get(bucket); - - compensatedSumLat.reset(sumLat, compensationLat); - compensatedSumLon.reset(sumLon, compensationLon); - + compensatedSumLat.reset(latSum.get(bucket), latCompensations.get(bucket)); + compensatedSumLon.reset(lonSum.get(bucket), lonCompensations.get(bucket)); // update the sum for (int i = 0; i < valueCount; ++i) { - GeoPoint value = values.nextValue(); + final GeoPoint value = values.nextValue(); // latitude compensatedSumLat.add(value.getLat()); // longitude @@ -96,6 +91,44 @@ public void collect(int doc, long bucket) throws IOException { }; } + private LeafBucketCollector getLeafCollector(GeoPointValues values, LeafBucketCollector sub) { + final CompensatedSum compensatedSum = new CompensatedSum(0, 0); + return new LeafBucketCollectorBase(sub, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + growBucket(bucket); + if (values.advanceExact(doc)) { + // increment by the number of points for this document + counts.increment(bucket, 1); + // Compute the sum of double values with Kahan summation algorithm which is more + // accurate than naive summation. + final GeoPoint value = values.pointValue(); + // latitude + compensatedSum.reset(latSum.get(bucket), latCompensations.get(bucket)); + compensatedSum.add(value.getLat()); + latSum.set(bucket, compensatedSum.value()); + latCompensations.set(bucket, compensatedSum.delta()); + // longitude + compensatedSum.reset(lonSum.get(bucket), lonCompensations.get(bucket)); + compensatedSum.add(value.getLon()); + lonSum.set(bucket, compensatedSum.value()); + lonCompensations.set(bucket, compensatedSum.delta()); + } + } + }; + } + + private void growBucket(long bucket) { + if (bucket >= latSum.size()) { + final long newSize = bucket + 1; + latSum = bigArrays().grow(latSum, newSize); + lonSum = bigArrays().grow(lonSum, newSize); + lonCompensations = bigArrays().grow(lonCompensations, newSize); + latCompensations = bigArrays().grow(latCompensations, newSize); + counts = bigArrays().grow(counts, newSize); + } + } + @Override public InternalAggregation buildAggregation(long bucket) { if (bucket >= counts.size()) { From 90a54ccfea12935aa80a6a556cae5283e9d576ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Mon, 22 Apr 2024 08:46:49 +0200 Subject: [PATCH 124/130] Fix testDynamicDeletionInterval intermittent failures (#107602) --- .../xpack/security/authc/InactiveApiKeysRemover.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InactiveApiKeysRemover.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InactiveApiKeysRemover.java index 14a77b7dfb244..e6acc749744c8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InactiveApiKeysRemover.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InactiveApiKeysRemover.java @@ -45,7 +45,7 @@ public final class InactiveApiKeysRemover extends AbstractRunnable { private final TimeValue timeout; private final AtomicLong retentionPeriodInMs; private final AtomicLong deleteIntervalInMs; - private volatile long lastRunMs; + private volatile long lastRunMs = -1; InactiveApiKeysRemover(Settings settings, Client client, ClusterService 
clusterService) { this.client = client; @@ -94,7 +94,7 @@ public void doRun() { } void maybeSubmit(ThreadPool threadPool) { - if (threadPool.relativeTimeInMillis() - lastRunMs > deleteIntervalInMs.get()) { + if (lastRunMs == -1 || threadPool.relativeTimeInMillis() - lastRunMs > deleteIntervalInMs.get()) { if (inProgress.compareAndSet(false, true)) { threadPool.executor(Names.GENERIC).submit(this); } From d3186ba46c36e6d89f82bb2bd372302b8f3f4818 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 22 Apr 2024 09:07:04 +0200 Subject: [PATCH 125/130] Fix WatcherMappingUpdateIT to run only for legacy versions (#107396) --- .../test/rest/RestTestLegacyFeatures.java | 11 ++++++++++- .../xpack/restart/WatcherMappingUpdateIT.java | 14 ++++++++++++-- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index 198226536af42..54647fea9cff7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -125,6 +125,14 @@ public class RestTestLegacyFeatures implements FeatureSpecification { @UpdateForV9 public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); + /* + * Starting with 8.11, cluster state has minimum system index mappings versions (#99307) and the system index mappings upgrade service + * started using them to determine when to update mappings for system indices. See https://github.com/elastic/elasticsearch/pull/99668 + */ + public static final NodeFeature MAPPINGS_UPGRADE_SERVICE_USES_MAPPINGS_VERSION = new NodeFeature( + "mappings.upgrade_service_uses_mappings_version" + ); + // YAML public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); @@ -174,7 +182,8 @@ public Map getHistoricalFeatures() { entry(DATA_STREAMS_SUPPORTED, Version.V_7_9_0), entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), entry(DISABLE_FIELD_NAMES_FIELD_REMOVED, Version.V_8_0_0), - entry(ML_NLP_SUPPORTED, Version.V_8_0_0) + entry(ML_NLP_SUPPORTED, Version.V_8_0_0), + entry(MAPPINGS_UPGRADE_SERVICE_USES_MAPPINGS_VERSION, Version.V_8_11_0) ); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java index fb7c22845b788..768d74c1b68e5 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java @@ -10,15 +10,16 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; +import 
org.junit.Before; import java.nio.charset.StandardCharsets; import java.util.Base64; @@ -27,13 +28,22 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100282") +@UpdateForV9 // Remove the whole test suite (superseded by SystemIndexMappingUpdateServiceIT#testSystemIndexManagerUpgradesMappings) public class WatcherMappingUpdateIT extends AbstractXpackFullClusterRestartTestCase { public WatcherMappingUpdateIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) { super(upgradeStatus); } + @Before + public void setup() { + // This test is superseded by SystemIndexMappingUpdateServiceIT#testSystemIndexManagerUpgradesMappings for newer versions + assumeFalse( + "Starting from 8.11, the mappings upgrade service uses mappings versions instead of node versions", + clusterHasFeature(RestTestLegacyFeatures.MAPPINGS_UPGRADE_SERVICE_USES_MAPPINGS_VERSION) + ); + } + @Override protected Settings restClientSettings() { String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); From 04f5080cbb43f5fafc664585c7690ac883ac8ff1 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Mon, 22 Apr 2024 09:12:09 +0200 Subject: [PATCH 126/130] Improve test isolation in DeprecationHttpIT based on xOpaqueId (#107563) --- .../deprecation/DeprecationTestUtils.java | 11 +- .../plugin/deprecation/qa/rest/build.gradle | 1 + .../xpack/deprecation/DeprecationHttpIT.java | 239 +++++++----------- 3 files changed, 100 insertions(+), 151 deletions(-) diff --git a/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationTestUtils.java b/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationTestUtils.java index 6fecdaea31346..a9018c9f80a87 100644 --- a/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationTestUtils.java +++ b/x-pack/plugin/deprecation/qa/common/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationTestUtils.java @@ -17,18 +17,25 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.common.logging.DeprecatedMessage.X_OPAQUE_ID_FIELD_NAME; + public class DeprecationTestUtils { /** * Same as DeprecationIndexingAppender#DEPRECATION_MESSAGES_DATA_STREAM, but that class isn't visible from here. */ public static final String DATA_STREAM_NAME = ".logs-deprecation.elasticsearch-default"; - @SuppressWarnings("unchecked") static List> getIndexedDeprecations(RestClient client) throws IOException { + return getIndexedDeprecations(client, null); + } + + @SuppressWarnings("unchecked") + static List> getIndexedDeprecations(RestClient client, String xOpaqueId) throws IOException { Response response; try { client.performRequest(new Request("POST", "/" + DATA_STREAM_NAME + "/_refresh?ignore_unavailable=true")); - response = client.performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "/_search")); + String query = xOpaqueId == null ? "" : "?q=" + X_OPAQUE_ID_FIELD_NAME + ":" + xOpaqueId; + response = client.performRequest(new Request("GET", "/" + DATA_STREAM_NAME + "/_search" + query)); } catch (Exception e) { // It can take a moment for the index to be created. If it doesn't exist then the client // throws an exception. 
Translate it into an assertion error so that assertBusy() will diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle index 49138c6b5dce0..70c0cadbce375 100644 --- a/x-pack/plugin/deprecation/qa/rest/build.gradle +++ b/x-pack/plugin/deprecation/qa/rest/build.gradle @@ -27,6 +27,7 @@ restResources { testClusters.configureEach { testDistribution = 'DEFAULT' setting 'cluster.deprecation_indexing.enabled', 'true' + setting 'cluster.deprecation_indexing.flush_interval', '100ms' setting 'xpack.security.enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' } diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java index 73c2fb607eb17..a0ce8b628e662 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java @@ -14,11 +14,9 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.Strings; @@ -31,13 +29,13 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.Matcher; -import org.junit.After; import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -49,7 +47,6 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.hasEntry; @@ -60,65 +57,31 @@ /** * Tests that deprecation message are returned via response headers, and can be indexed into a data stream. */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101596") public class DeprecationHttpIT extends ESRestTestCase { + @Rule + public TestName testName = new TestName(); + + private String xOpaqueId() { + String name = testName.getMethodName(); + int pos = name.indexOf(" "); // additional suffix in case of repeated runs + return pos == -1 ? 
name : name.substring(0, pos) + "-" + name.hashCode(); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; // isolation is based on xOpaqueId + } + @Before public void assertIndexingIsEnabled() throws Exception { - // make sure the deprecation logs indexing is enabled - Response response = client().performRequest(new Request("GET", "/_cluster/settings?include_defaults=true&flat_settings=true")); - assertOK(response); + Response response = performScopedRequest(new Request("GET", "/_cluster/settings?include_defaults=true&flat_settings=true")); + ObjectMapper mapper = new ObjectMapper(); final JsonNode jsonNode = mapper.readTree(response.getEntity().getContent()); - final boolean defaultValue = jsonNode.at("/defaults/cluster.deprecation_indexing.enabled").asBoolean(); assertTrue(defaultValue); - - // assert index does not exist, which will prevent previous tests to interfere - assertBusy(() -> { - - try { - client().performRequest(new Request("GET", "/_data_stream/" + DeprecationTestUtils.DATA_STREAM_NAME)); - } catch (ResponseException e) { - if (e.getResponse().getStatusLine().getStatusCode() == 404) { - return; - } - } - - List> documents = DeprecationTestUtils.getIndexedDeprecations(client()); - logger.warn(documents); - // if data stream is still present, that means that previous test (could be different class) created a deprecation - // hence resetting again - resetDeprecationIndexAndCache(); - fail("Index should be removed on startup"); - }, 30, TimeUnit.SECONDS); - } - - @After - public void cleanUp() throws Exception { - resetDeprecationIndexAndCache(); - - // switch logging setting to default - configureWriteDeprecationLogsToIndex(null); - } - - private void resetDeprecationIndexAndCache() throws Exception { - // making sure the deprecation indexing cache is reset and index is deleted - assertBusy(() -> { - try { - client().performRequest(new Request("DELETE", "_logging/deprecation_cache")); - client().performRequest(new Request("DELETE", "/_data_stream/" + DeprecationTestUtils.DATA_STREAM_NAME)); - } catch (Exception e) { - throw new AssertionError(e); - } - }, 30, TimeUnit.SECONDS); - - assertBusy(() -> { - // wait for the data stream to really be deleted - var response = ESRestTestCase.entityAsMap(client().performRequest(new Request("GET", "/_data_stream"))); - assertThat((Collection) response.get("data_streams"), empty()); - }); } /** @@ -149,7 +112,7 @@ public void testDeprecatedSettingsReturnWarnings() throws Exception { final Request request = new Request("PUT", "_cluster/settings"); /// request.setJsonEntity(Strings.toString(builder)); - final Response response = client().performRequest(request); + final Response response = performScopedRequest(request); final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); assertThat(deprecatedWarnings, everyItem(matchesRegex(HeaderWarning.WARNING_HEADER_PATTERN))); @@ -174,11 +137,10 @@ public void testDeprecatedSettingsReturnWarnings() throws Exception { ); assertBusy(() -> { - List> documents = DeprecationTestUtils.getIndexedDeprecations(client()); + List> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId()); logger.warn(documents); assertThat(documents, hasSize(2)); }); - } finally { cleanupSettings(); } @@ -197,7 +159,7 @@ private void cleanupSettings() throws IOException { final Request request = new Request("PUT", "_cluster/settings"); request.setJsonEntity(Strings.toString(builder)); - client().performRequest(request); + performScopedRequest(request); } 
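// Isolation model for the tests below: instead of deleting the shared deprecation data
// stream between tests, every request is tagged with this test's X-Opaque-Id (via
// performScopedRequest) and getIndexedDeprecations filters the indexed documents back
// on that same id, so prior test runs no longer interfere.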
/** @@ -219,19 +181,17 @@ public void testUniqueDeprecationResponsesMergedTogether() throws IOException { for (int j = 0; j < randomDocCount; j++) { final Request request = new Request("PUT", indices[i] + "/" + j); request.setJsonEntity("{ \"field\": " + j + " }"); - assertOK(client().performRequest(request)); + performScopedRequest(request); } } final String commaSeparatedIndices = String.join(",", indices); - client().performRequest(new Request("POST", commaSeparatedIndices + "/_refresh")); - + performScopedRequest(new Request("POST", commaSeparatedIndices + "/_refresh")); // trigger all index deprecations Request request = new Request("GET", "/" + commaSeparatedIndices + "/_search"); request.setJsonEntity("{ \"query\": { \"bool\": { \"filter\": [ { \"deprecated\": {} } ] } } }"); - Response response = client().performRequest(request); - assertOK(response); + Response response = performScopedRequest(request); final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); final List> headerMatchers = new ArrayList<>(); @@ -244,14 +204,12 @@ public void testUniqueDeprecationResponsesMergedTogether() throws IOException { } public void testDeprecationWarningsAppearInHeaders() throws Exception { - doTestDeprecationWarningsAppearInHeaders(); + doTestDeprecationWarningsAppearInHeaders(xOpaqueId()); } public void testDeprecationHeadersDoNotGetStuck() throws Exception { - doTestDeprecationWarningsAppearInHeaders(); - doTestDeprecationWarningsAppearInHeaders(); - if (rarely()) { - doTestDeprecationWarningsAppearInHeaders(); + for (int i = 0; i < 3; i++) { + doTestDeprecationWarningsAppearInHeaders(xOpaqueId() + "-" + i); } } @@ -260,7 +218,7 @@ public void testDeprecationHeadersDoNotGetStuck() throws Exception { *

    * Re-running this back-to-back helps to ensure that warnings are not being maintained across requests. */ - private void doTestDeprecationWarningsAppearInHeaders() throws Exception { + private void doTestDeprecationWarningsAppearInHeaders(String xOpaqueId) throws Exception { final boolean useDeprecatedField = randomBoolean(); final boolean useNonDeprecatedSetting = randomBoolean(); @@ -281,11 +239,7 @@ private void doTestDeprecationWarningsAppearInHeaders() throws Exception { // trigger all deprecations Request request = new Request("GET", "/_test_cluster/deprecated_settings"); request.setEntity(buildSettingsRequest(settings, useDeprecatedField ? "deprecated_settings" : "settings")); - String xOpaqueId = "XOpaqueId-doTestDeprecationWarningsAppearInHeaders" + randomInt(); - final RequestOptions options = request.getOptions().toBuilder().addHeader("X-Opaque-Id", xOpaqueId).build(); - request.setOptions(options); - Response response = client().performRequest(request); - assertOK(response); + Response response = performScopedRequest(request, xOpaqueId); final List deprecatedWarnings = getWarningHeaders(response.getHeaders()); final List> headerMatchers = new ArrayList<>(4); @@ -303,25 +257,19 @@ private void doTestDeprecationWarningsAppearInHeaders() throws Exception { // expect to index same number of new deprecations as the number of header warnings in the response assertBusy(() -> { - List> documents = DeprecationTestUtils.getIndexedDeprecations(client()); - long indexedDeprecations = documents.stream().filter(m -> xOpaqueId.equals(m.get(X_OPAQUE_ID_FIELD_NAME))).count(); - assertThat(documents.toString(), indexedDeprecations, equalTo((long) headerMatchers.size())); + var documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId); + logger.warn(documents); + assertThat(documents, hasSize(headerMatchers.size())); }); - } public void testDeprecationRouteThrottling() throws Exception { - - final Request deprecatedRequest = deprecatedRequest("GET", "xOpaqueId-testDeprecationRouteThrottling"); - assertOK(client().performRequest(deprecatedRequest)); - - assertOK(client().performRequest(deprecatedRequest)); - - final Request postRequest = deprecatedRequest("POST", "xOpaqueId-testDeprecationRouteThrottling"); - assertOK(client().performRequest(postRequest)); + performScopedRequest(deprecatedRequest("GET")); + performScopedRequest(deprecatedRequest("GET")); + performScopedRequest(deprecatedRequest("POST")); assertBusy(() -> { - List> documents = DeprecationTestUtils.getIndexedDeprecations(client()); + List> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId()); logger.warn(documents); @@ -347,40 +295,39 @@ public void testDeprecationRouteThrottling() throws Exception { } public void testDisableDeprecationLogIndexing() throws Exception { - final Request deprecatedRequest = deprecatedRequest("GET", "xOpaqueId-testDisableDeprecationLogIndexing"); - assertOK(client().performRequest(deprecatedRequest)); + performScopedRequest(deprecatedRequest("GET")); configureWriteDeprecationLogsToIndex(false); - final Request postRequest = deprecatedRequest("POST", "xOpaqueId-testDisableDeprecationLogIndexing"); - assertOK(client().performRequest(postRequest)); - - assertBusy(() -> { - List> documents = DeprecationTestUtils.getIndexedDeprecations(client()); + try { + performScopedRequest(deprecatedRequest("POST")); + assertBusy(() -> { + List> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId()); - logger.warn(documents); + 
             logger.warn(documents);

-            assertThat(
-                documents,
-                containsInAnyOrder(
-                    allOf(
-                        hasEntry(KEY_FIELD_NAME, "deprecated_route_GET_/_test_cluster/deprecated_settings"),
-                        hasEntry("message", "[/_test_cluster/deprecated_settings] exists for deprecated tests")
-                    ),
-                    allOf(
-                        hasEntry(KEY_FIELD_NAME, "deprecated_settings"),
-                        hasEntry("message", "[deprecated_settings] usage is deprecated. use [settings] instead")
+                assertThat(
+                    documents,
+                    containsInAnyOrder(
+                        allOf(
+                            hasEntry(KEY_FIELD_NAME, "deprecated_route_GET_/_test_cluster/deprecated_settings"),
+                            hasEntry("message", "[/_test_cluster/deprecated_settings] exists for deprecated tests")
+                        ),
+                        allOf(
+                            hasEntry(KEY_FIELD_NAME, "deprecated_settings"),
+                            hasEntry("message", "[deprecated_settings] usage is deprecated. use [settings] instead")
+                        )
                     )
-                )
-            );
-        }, 30, TimeUnit.SECONDS);
+                );
+            }, 30, TimeUnit.SECONDS);
+        } finally {
+            configureWriteDeprecationLogsToIndex(null);
+        }
     }

     // triggers two deprecations - endpoint and setting
-    private Request deprecatedRequest(String method, String xOpaqueId) throws IOException {
+    private Request deprecatedRequest(String method) throws IOException {
         final Request getRequest = new Request(method, "/_test_cluster/deprecated_settings");
-        final RequestOptions options = getRequest.getOptions().toBuilder().addHeader("X-Opaque-Id", xOpaqueId).build();
-        getRequest.setOptions(options);
         getRequest.setEntity(
             buildSettingsRequest(
                 Collections.singletonList(TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1),
@@ -394,12 +341,10 @@ private Request deprecatedRequest(String method, String xOpaqueId) throws IOExce
     /**
      * Check that deprecation messages can be recorded to an index
      */
     public void testDeprecationMessagesCanBeIndexed() throws Exception {
-
-        final Request request = deprecatedRequest("GET", "xOpaqueId-testDeprecationMessagesCanBeIndexed");
-        assertOK(client().performRequest(request));
+        performScopedRequest(deprecatedRequest("GET"));

         assertBusy(() -> {
-            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client());
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());

             logger.warn(documents);

@@ -410,7 +355,7 @@ public void testDeprecationMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testDeprecationMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "settings"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -428,7 +373,7 @@ public void testDeprecationMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testDeprecationMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "api"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -452,23 +397,17 @@ public void testDeprecationMessagesCanBeIndexed() throws Exception {
      * Check that a deprecation message with CRITICAL level can be recorded to an index
      */
     public void testDeprecationCriticalWarnMessagesCanBeIndexed() throws Exception {
         final Request request = new Request("GET", "/_test_cluster/only_deprecated_setting");
-        final RequestOptions options = request.getOptions()
-            .toBuilder()
-            .addHeader("X-Opaque-Id", "xOpaqueId-testDeprecationCriticalWarnMessagesCanBeIndexed")
-            .build();
-        request.setOptions(options);
         request.setEntity(
             buildSettingsRequest(
                 Collections.singletonList(TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE3),
                 "deprecation_critical"
             )
         );
-        assertOK(client().performRequest(request));
+        performScopedRequest(request);

         assertBusy(() -> {
-            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client());
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());

             logger.warn(documents);

@@ -479,7 +418,7 @@ public void testDeprecationCriticalWarnMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testDeprecationCriticalWarnMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "settings"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -505,21 +444,16 @@ public void testDeprecationCriticalWarnMessagesCanBeIndexed() throws Exception {

     public void testDeprecationWarnMessagesCanBeIndexed() throws Exception {
         final Request request = new Request("GET", "/_test_cluster/deprecated_settings");
-        final RequestOptions options = request.getOptions()
-            .toBuilder()
-            .addHeader("X-Opaque-Id", "xOpaqueId-testDeprecationWarnMessagesCanBeIndexed")
-            .build();
-        request.setOptions(options);
         request.setEntity(
             buildSettingsRequest(
                 Collections.singletonList(TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1),
                 "deprecation_warning"
             )
         );
-        assertOK(client().performRequest(request));
+        performScopedRequest(request);

         assertBusy(() -> {
-            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client());
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());

             logger.warn(documents);

@@ -530,7 +464,7 @@ public void testDeprecationWarnMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testDeprecationWarnMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "settings"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -548,7 +482,7 @@ public void testDeprecationWarnMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testDeprecationWarnMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "api"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -576,7 +510,6 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
         final Request compatibleRequest = new Request("GET", "/_test_cluster/compat_only");
         final RequestOptions compatibleOptions = compatibleRequest.getOptions()
             .toBuilder()
-            .addHeader("X-Opaque-Id", "xOpaqueId-testCompatibleMessagesCanBeIndexed")
             .addHeader("Accept", "application/vnd.elasticsearch+json;compatible-with=" + RestApiVersion.minimumSupported().major)
             .addHeader("Content-Type", "application/vnd.elasticsearch+json;compatible-with=" + RestApiVersion.minimumSupported().major)
             .build();
@@ -587,8 +520,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
                 "deprecated_settings"
             )
         );
-        Response deprecatedApiResponse = client().performRequest(compatibleRequest);
-        assertOK(deprecatedApiResponse);
+        Response deprecatedApiResponse = performScopedRequest(compatibleRequest);

         final List<String> deprecatedWarnings = getWarningHeaders(deprecatedApiResponse.getHeaders());
         final List<String> actualWarningValues = deprecatedWarnings.stream()
@@ -600,7 +532,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
         );

         assertBusy(() -> {
-            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client());
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());

             logger.warn(documents);

@@ -611,7 +543,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testCompatibleMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "compatible_api"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -629,7 +561,7 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
                     hasKey("@timestamp"),
                     hasKey("elasticsearch.cluster.name"),
                     hasKey("elasticsearch.cluster.uuid"),
-                    hasEntry(X_OPAQUE_ID_FIELD_NAME, "xOpaqueId-testCompatibleMessagesCanBeIndexed"),
+                    hasEntry(X_OPAQUE_ID_FIELD_NAME, xOpaqueId()),
                     hasEntry("elasticsearch.event.category", "compatible_api"),
                     hasKey("elasticsearch.node.id"),
                     hasKey("elasticsearch.node.name"),
@@ -654,15 +586,14 @@ public void testCompatibleMessagesCanBeIndexed() throws Exception {
      */
     public void testDeprecationIndexingCacheReset() throws Exception {
-        final Request deprecatedRequest = deprecatedRequest("GET", "xOpaqueId-testDeprecationIndexingCacheReset");
-        assertOK(client().performRequest(deprecatedRequest));
+        performScopedRequest(deprecatedRequest("GET"));

-        client().performRequest(new Request("DELETE", "/_logging/deprecation_cache"));
+        performScopedRequest(new Request("DELETE", "/_logging/deprecation_cache"));

-        assertOK(client().performRequest(deprecatedRequest));
+        performScopedRequest(deprecatedRequest("GET"));

         assertBusy(() -> {
-            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client());
+            List<Map<String, Object>> documents = DeprecationTestUtils.getIndexedDeprecations(client(), xOpaqueId());

             logger.warn(documents);

@@ -694,8 +625,7 @@ public void testDeprecationIndexingCacheReset() throws Exception {
     private void configureWriteDeprecationLogsToIndex(Boolean value) throws IOException {
         final Request request = new Request("PUT", "_cluster/settings");
         request.setJsonEntity("{ \"persistent\": { \"cluster.deprecation_indexing.enabled\": " + value + " } }");
-        final Response response = client().performRequest(request);
-        assertOK(response);
+        performScopedRequest(request);
     }

     private List<String> getWarningHeaders(Header[] headers) {
@@ -715,6 +645,17 @@ private HttpEntity buildSettingsRequest(List<Setting<Boolean>> settings, String
         return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
     }

+    private Response performScopedRequest(Request req) throws IOException {
+        return performScopedRequest(req, xOpaqueId());
+    }
+
+    private Response performScopedRequest(Request req, String xOpaqueId) throws IOException {
+        req.setOptions(req.getOptions().toBuilder().addHeader("X-Opaque-Id", xOpaqueId).build());
+        Response response = client().performRequest(req);
+        assertOK(response);
+        return response;
+    }
+
     /**
      * Builds a REST client that will tolerate warnings in the response headers. The default
      * is to throw an exception.
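The refactoring above funnels every test request through a single performScopedRequest helper: each request is stamped with a per-test X-Opaque-Id header, and getIndexedDeprecations(client(), xOpaqueId()) then filters the asynchronously indexed deprecation documents down to the ones the current test produced, so parallel tests stop tripping over each other's documents. A minimal sketch of that pattern, written against the low-level REST client, is shown below; the ScopedClient class name and the way the opaque id is derived are illustrative, not part of the patch.

    import java.io.IOException;

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    /** Sketch: tag every request so asynchronously indexed side effects can be traced back to their caller. */
    final class ScopedClient {
        private final RestClient client;
        private final String opaqueId; // e.g. a unique per-test string; illustrative

        ScopedClient(RestClient client, String opaqueId) {
            this.client = client;
            this.opaqueId = opaqueId;
        }

        Response perform(Request request) throws IOException {
            // Elasticsearch copies the X-Opaque-Id header into the deprecation documents it
            // indexes, which is what makes per-caller filtering of those documents possible.
            request.setOptions(request.getOptions().toBuilder().addHeader("X-Opaque-Id", opaqueId).build());
            return client.performRequest(request);
        }
    }

This is also why the assertions in the diff switch from hard-coded "xOpaqueId-..." string literals to the shared xOpaqueId() accessor: the id only needs to be unique and consistent within a single test.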
From 430d920643093f1582df4500ee706fd4709d199a Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 22 Apr 2024 08:17:38 +0100 Subject: [PATCH 127/130] Rename `timeout` to `ackTimeout` (#107652) Elasticsearch has various timeout parameters, each of which applies in a different context. One commonly-encountered such timeout is `AcknowledgedRequest#timeout()`. Despite its unqualified name, this timeout has a very specific meaning. This commit renames all code usages to `ackTimeout()` to better clarify what it's for. Relates #107044 --- .../CreateDataStreamTransportAction.java | 2 +- .../MigrateToDataStreamTransportAction.java | 2 +- .../RestPutDataStreamLifecycleAction.java | 2 +- .../rest/RestModifyDataStreamsAction.java | 2 +- .../reroute/ClusterRerouteRequest.java | 4 +- .../alias/TransportIndicesAliasesAction.java | 2 +- .../close/TransportCloseIndexAction.java | 2 +- .../indices/create/AutoCreateAction.java | 8 +- .../create/TransportCreateIndexAction.java | 4 +- .../delete/TransportDeleteIndexAction.java | 2 +- .../put/TransportPutMappingAction.java | 2 +- .../open/TransportOpenIndexAction.java | 2 +- .../TransportAddIndexBlockAction.java | 2 +- .../rollover/MetadataRolloverService.java | 4 +- .../put/TransportUpdateSettingsAction.java | 2 +- .../settings/put/UpdateSettingsRequest.java | 4 +- .../indices/shrink/TransportResizeAction.java | 2 +- .../support/master/AcknowledgedRequest.java | 88 ++++++++++++++----- .../master/AcknowledgedRequestBuilder.java | 4 +- .../action/index/MappingUpdatedAction.java | 2 +- .../elasticsearch/indices/IndicesService.java | 13 +-- .../cluster/RestCleanupRepositoryAction.java | 2 +- .../cluster/RestClusterRerouteAction.java | 2 +- .../RestClusterUpdateSettingsAction.java | 2 +- .../cluster/RestDeleteRepositoryAction.java | 2 +- .../cluster/RestDeleteStoredScriptAction.java | 2 +- .../cluster/RestPutRepositoryAction.java | 2 +- .../cluster/RestPutStoredScriptAction.java | 2 +- .../cluster/RestVerifyRepositoryAction.java | 2 +- .../RestDeleteDanglingIndexAction.java | 2 +- .../RestImportDanglingIndexAction.java | 2 +- .../indices/RestAddIndexBlockAction.java | 2 +- .../admin/indices/RestCloseIndexAction.java | 2 +- .../admin/indices/RestCreateIndexAction.java | 4 +- .../admin/indices/RestDeleteIndexAction.java | 2 +- .../indices/RestIndexDeleteAliasesAction.java | 2 +- .../indices/RestIndexPutAliasAction.java | 2 +- .../indices/RestIndicesAliasesAction.java | 2 +- .../admin/indices/RestOpenIndexAction.java | 2 +- .../admin/indices/RestPutMappingAction.java | 2 +- .../admin/indices/RestResizeHandler.java | 2 +- .../indices/RestRolloverIndexAction.java | 2 +- .../indices/RestUpdateSettingsAction.java | 2 +- .../ingest/RestDeletePipelineAction.java | 2 +- .../action/ingest/RestPutPipelineAction.java | 2 +- .../reroute/ClusterRerouteRequestTests.java | 12 +-- .../cluster/reroute/ClusterRerouteTests.java | 4 +- .../alias/IndicesAliasesRequestTests.java | 2 +- .../indices/close/CloseIndexRequestTests.java | 10 +-- ...dateSettingsRequestSerializationTests.java | 6 +- .../ccr/PrimaryFollowerAllocationIT.java | 2 +- .../ccr/action/TransportPutFollowAction.java | 2 +- .../license/RestDeleteLicenseAction.java | 2 +- .../license/RestPostStartBasicLicense.java | 2 +- .../license/RestPutLicenseAction.java | 2 +- .../DeleteDataFrameAnalyticsAction.java | 6 +- .../core/ml/action/GetMlAutoscalingStats.java | 4 +- .../UpdateTrainedModelDeploymentAction.java | 2 +- .../action/DeleteTransformAction.java | 4 +- .../transform/action/PutTransformAction.java | 4 +- 
.../action/ResetTransformAction.java | 4 +- .../action/StartTransformAction.java | 4 +- .../action/UpgradeTransformsAction.java | 4 +- .../action/ValidateTransformAction.java | 4 +- ...eDataFrameAnalyticsActionRequestTests.java | 6 +- .../GetMlAutoscalingStatsRequestTests.java | 2 +- ...tDataFrameAnalyticsActionRequestTests.java | 4 +- ...eDataFrameAnalyticsActionRequestTests.java | 4 +- .../DeleteTransformActionRequestTests.java | 2 +- .../PutTransformActionRequestTests.java | 2 +- .../StartTransformActionRequestTests.java | 2 +- .../UpgradeTransformsActionRequestTests.java | 2 +- .../ValidateTransformActionRequestTests.java | 2 +- .../action/TransportFreezeIndexAction.java | 4 +- .../rest/action/RestFreezeIndexAction.java | 2 +- .../ilm/action/RestDeleteLifecycleAction.java | 2 +- .../ilm/action/RestGetLifecycleAction.java | 2 +- .../xpack/ilm/action/RestGetStatusAction.java | 2 +- .../ilm/action/RestMoveToStepAction.java | 2 +- .../ilm/action/RestPutLifecycleAction.java | 2 +- .../xpack/ilm/action/RestRetryAction.java | 2 +- .../xpack/ilm/action/RestStartILMAction.java | 2 +- .../xpack/ilm/action/RestStopAction.java | 2 +- ...ansportDeleteDataFrameAnalyticsAction.java | 4 +- .../ml/action/TransportDeleteJobAction.java | 2 +- .../TransportDeleteTrainedModelAction.java | 7 +- .../ml/action/TransportMlMemoryAction.java | 4 +- .../TransportPutTrainedModelAction.java | 2 +- .../ml/action/TransportResetJobAction.java | 2 +- .../TransportRevertModelSnapshotAction.java | 2 +- .../action/TransportSetUpgradeModeAction.java | 4 +- .../task/OpenJobPersistentTasksExecutor.java | 4 +- .../xpack/ml/rest/RestMlMemoryAction.java | 2 +- .../ml/rest/RestSetUpgradeModeAction.java | 2 +- .../datafeeds/RestDeleteDatafeedAction.java | 2 +- .../rest/datafeeds/RestPutDatafeedAction.java | 2 +- .../datafeeds/RestUpdateDatafeedAction.java | 2 +- .../RestDeleteDataFrameAnalyticsAction.java | 4 +- ...estPostDataFrameAnalyticsUpdateAction.java | 2 +- .../RestPutDataFrameAnalyticsAction.java | 2 +- .../rest/filter/RestDeleteFilterAction.java | 2 +- .../RestDeleteTrainedModelAction.java | 2 +- .../inference/RestPutTrainedModelAction.java | 2 +- ...estUpdateTrainedModelDeploymentAction.java | 2 +- .../ml/rest/job/RestDeleteForecastAction.java | 2 +- .../ml/rest/job/RestDeleteJobAction.java | 2 +- .../ml/rest/job/RestPostJobUpdateAction.java | 2 +- .../xpack/ml/rest/job/RestPutJobAction.java | 2 +- .../xpack/ml/rest/job/RestResetJobAction.java | 2 +- .../RestRevertModelSnapshotAction.java | 2 +- .../profiling/action/GetStatusActionIT.java | 4 +- .../action/TransportGetStatusAction.java | 2 +- .../profiling/rest/RestGetStatusAction.java | 2 +- ...TransportUpdateSecuritySettingsAction.java | 6 +- .../RestDeleteSnapshotLifecycleAction.java | 2 +- .../RestExecuteSnapshotLifecycleAction.java | 2 +- .../RestExecuteSnapshotRetentionAction.java | 2 +- .../slm/action/RestGetSLMStatusAction.java | 2 +- .../RestGetSnapshotLifecycleAction.java | 2 +- .../RestGetSnapshotLifecycleStatsAction.java | 2 +- .../RestPutSnapshotLifecycleAction.java | 2 +- .../xpack/slm/action/RestStartSLMAction.java | 2 +- .../xpack/slm/action/RestStopSLMAction.java | 2 +- .../TransportDeleteTransformAction.java | 6 +- .../TransportPreviewTransformAction.java | 2 +- .../action/TransportPutTransformAction.java | 2 +- .../action/TransportResetTransformAction.java | 2 +- .../action/TransportStartTransformAction.java | 4 +- .../TransportUpgradeTransformsAction.java | 2 +- .../TransportValidateTransformAction.java | 2 +- 
.../TransportUpdateWatcherSettingsAction.java | 2 +- .../plugin/freeze/FreezeIndexPlugin.java | 2 +- 132 files changed, 260 insertions(+), 198 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/CreateDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/CreateDataStreamTransportAction.java index 7964934004dd0..36f5ecaadd446 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/CreateDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/CreateDataStreamTransportAction.java @@ -72,7 +72,7 @@ protected void masterOperation( request.getStartTime(), systemDataStreamDescriptor, request.masterNodeTimeout(), - request.timeout(), + request.ackTimeout(), true ); metadataCreateDataStreamService.createDataStream(updateRequest, listener); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/MigrateToDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/MigrateToDataStreamTransportAction.java index ba013e46926d3..adbbfe7b28541 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/MigrateToDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/MigrateToDataStreamTransportAction.java @@ -69,7 +69,7 @@ protected void masterOperation( new MetadataMigrateToDataStreamService.MigrateToDataStreamClusterStateUpdateRequest( request.getAliasName(), request.masterNodeTimeout(), - request.timeout() + request.ackTimeout() ); metadataMigrateToDataStreamService.migrateToDataStream(updateRequest, listener); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java index f763c0d75ed47..70228a16d7a01 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/rest/RestPutDataStreamLifecycleAction.java @@ -42,7 +42,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli PutDataStreamLifecycleAction.Request putLifecycleRequest = PutDataStreamLifecycleAction.Request.parseRequest(parser); putLifecycleRequest.indices(Strings.splitStringByCommaToArray(request.param("name"))); putLifecycleRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putLifecycleRequest.masterNodeTimeout())); - putLifecycleRequest.timeout(request.paramAsTime("timeout", putLifecycleRequest.timeout())); + putLifecycleRequest.ackTimeout(request.paramAsTime("timeout", putLifecycleRequest.ackTimeout())); putLifecycleRequest.indicesOptions(IndicesOptions.fromRequest(request, putLifecycleRequest.indicesOptions())); return channel -> client.execute( PutDataStreamLifecycleAction.INSTANCE, diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestModifyDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestModifyDataStreamsAction.java index 14b0ed7be1bdb..006422e4c04e7 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestModifyDataStreamsAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/rest/RestModifyDataStreamsAction.java @@ -44,7 +44,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli throw new IllegalArgumentException("no data stream actions specified, at least one must be specified"); } modifyDsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", modifyDsRequest.masterNodeTimeout())); - modifyDsRequest.timeout(request.paramAsTime("timeout", modifyDsRequest.timeout())); + modifyDsRequest.ackTimeout(request.paramAsTime("timeout", modifyDsRequest.ackTimeout())); return channel -> client.execute(ModifyDataStreamsAction.INSTANCE, modifyDsRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java index 52c44c33be3bf..5aeef6b19298e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java @@ -129,7 +129,7 @@ public boolean equals(Object obj) { return Objects.equals(commands, other.commands) && Objects.equals(dryRun, other.dryRun) && Objects.equals(explain, other.explain) - && Objects.equals(timeout, other.timeout) + && Objects.equals(ackTimeout(), other.ackTimeout()) && Objects.equals(retryFailed, other.retryFailed) && Objects.equals(masterNodeTimeout, other.masterNodeTimeout); } @@ -137,6 +137,6 @@ public boolean equals(Object obj) { @Override public int hashCode() { // Override equals and hashCode for testing - return Objects.hash(commands, dryRun, explain, timeout, retryFailed, masterNodeTimeout); + return Objects.hash(commands, dryRun, explain, ackTimeout(), retryFailed, masterNodeTimeout); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 2e231b398af72..f9c255bb057d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -256,7 +256,7 @@ protected void masterOperation( IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest( unmodifiableList(finalActions), unmodifiableList(actionResults) - ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); + ).ackTimeout(request.ackTimeout()).masterNodeTimeout(request.masterNodeTimeout()); indexAliasesService.indicesAliases(updateRequest, listener.delegateResponse((l, e) -> { logger.debug("failed to perform aliases", e); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 5fe1ba459ba93..f87ea1e4cd6c7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -120,7 +120,7 @@ protected void masterOperation( } final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest(task.getId()).ackTimeout( - request.timeout() + 
request.ackTimeout() ).masterNodeTimeout(request.masterNodeTimeout()).waitForActiveShards(request.waitForActiveShards()).indices(concreteIndices); indexStateService.closeIndices(closeRequest, listener.delegateResponse((delegatedListener, t) -> { logger.debug(() -> "failed to close indices [" + Arrays.toString(concreteIndices) + "]", t); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 98848f041cea2..9cd7d713a3a4c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -190,7 +190,7 @@ public void onAllNodesAcked() { clusterService, indexNames.toArray(String[]::new), ActiveShardCount.DEFAULT, - request.timeout(), + request.ackTimeout(), allocationActionMultiListener.delay(listener) .map(shardsAcked -> new CreateIndexResponse(true, shardsAcked, indexNames.get(0))) ); @@ -253,7 +253,7 @@ ClusterState execute( request.index(), dataStreamDescriptor, request.masterNodeTimeout(), - request.timeout(), + request.ackTimeout(), false ); assert createRequest.performReroute() == false @@ -348,7 +348,7 @@ private CreateIndexClusterStateUpdateRequest buildUpdateRequest(String indexName request.cause(), indexName, request.index() - ).ackTimeout(request.timeout()).performReroute(false).masterNodeTimeout(request.masterNodeTimeout()); + ).ackTimeout(request.ackTimeout()).performReroute(false).masterNodeTimeout(request.masterNodeTimeout()); logger.debug("Auto-creating index {}", indexName); return updateRequest; } @@ -365,7 +365,7 @@ private CreateIndexClusterStateUpdateRequest buildSystemIndexUpdateRequest(Strin request.cause(), concreteIndexName, request.index() - ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()).performReroute(false); + ).ackTimeout(request.ackTimeout()).masterNodeTimeout(request.masterNodeTimeout()).performReroute(false); updateRequest.waitForActiveShards(ActiveShardCount.ALL); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index 72f4c4676cf1d..e62205938f6fd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -165,7 +165,7 @@ private CreateIndexClusterStateUpdateRequest buildUpdateRequest( alias.isHidden(true); } }).collect(Collectors.toSet()); - return new CreateIndexClusterStateUpdateRequest(cause, indexName, request.index()).ackTimeout(request.timeout()) + return new CreateIndexClusterStateUpdateRequest(cause, indexName, request.index()).ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()) .settings(request.settings()) .mappings(request.mappings()) @@ -201,7 +201,7 @@ private static CreateIndexClusterStateUpdateRequest buildSystemIndexUpdateReques request.index() ); - return updateRequest.ackTimeout(request.timeout()) + return updateRequest.ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()) .aliases(aliases) .waitForActiveShards(ActiveShardCount.ALL) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index 1eb8612d16da3..35034eebfaa93 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -94,7 +94,7 @@ protected void masterOperation( DeleteIndexClusterStateUpdateRequest deleteRequest = new DeleteIndexClusterStateUpdateRequest(listener.delegateResponse((l, e) -> { logger.debug(() -> "failed to delete indices [" + concreteIndices + "]", e); listener.onFailure(e); - })).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()).indices(concreteIndices.toArray(new Index[0])); + })).ackTimeout(request.ackTimeout()).masterNodeTimeout(request.masterNodeTimeout()).indices(concreteIndices.toArray(new Index[0])); deleteIndexService.deleteIndices(deleteRequest); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index e93bc1501744d..ca6e97c3e1334 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -157,7 +157,7 @@ static void performMappingUpdate( final PutMappingClusterStateUpdateRequest updateRequest; try { updateRequest = new PutMappingClusterStateUpdateRequest(request.source()).indices(concreteIndices) - .ackTimeout(request.timeout()) + .ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()) .autoUpdate(autoUpdate); } catch (IOException e) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 309e9d841c97a..e8fc62d480bc4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -89,7 +89,7 @@ protected void masterOperation( listener.onResponse(new OpenIndexResponse(true, true)); return; } - OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest().ackTimeout(request.timeout()) + OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest().ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java index ab2549bf9de67..72731bc636b13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java @@ -104,7 +104,7 @@ protected void masterOperation( final AddIndexBlockClusterStateUpdateRequest addBlockRequest = new AddIndexBlockClusterStateUpdateRequest( request.getBlock(), task.getId() - ).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()).indices(concreteIndices); + 
).ackTimeout(request.ackTimeout()).masterNodeTimeout(request.masterNodeTimeout()).indices(concreteIndices); indexStateService.addIndexBlock(addBlockRequest, listener.delegateResponse((delegatedListener, t) -> { logger.debug(() -> "failed to mark indices as readonly [" + Arrays.toString(concreteIndices) + "]", t); delegatedListener.onFailure(t); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index cef0b3797b1d4..45368c185fb77 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -523,7 +523,9 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest( if (settings != null) { b.put(settings); } - return new CreateIndexClusterStateUpdateRequest(cause, targetIndexName, providedIndexName).ackTimeout(createIndexRequest.timeout()) + return new CreateIndexClusterStateUpdateRequest(cause, targetIndexName, providedIndexName).ackTimeout( + createIndexRequest.ackTimeout() + ) .masterNodeTimeout(createIndexRequest.masterNodeTimeout()) .settings(b.build()) .aliases(createIndexRequest.aliases()) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 164c403d1b516..36c0634fa9dba 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -129,7 +129,7 @@ protected void masterOperation( .settings(requestSettings) .setPreserveExisting(request.isPreserveExisting()) .reopenShards(request.reopen()) - .ackTimeout(request.timeout()) + .ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()); updateSettingsService.updateSettings(clusterStateUpdateRequest, listener.delegateResponse((l, e) -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index b6f03f1477951..666419edc1bf0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -255,7 +255,7 @@ public boolean equals(Object o) { } UpdateSettingsRequest that = (UpdateSettingsRequest) o; return masterNodeTimeout.equals(that.masterNodeTimeout) - && timeout.equals(that.timeout) + && ackTimeout().equals(that.ackTimeout()) && Objects.equals(settings, that.settings) && Objects.equals(indicesOptions, that.indicesOptions) && Objects.equals(preserveExisting, that.preserveExisting) @@ -265,7 +265,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(masterNodeTimeout, timeout, settings, indicesOptions, preserveExisting, reopen, Arrays.hashCode(indices)); + return Objects.hash(masterNodeTimeout, ackTimeout(), settings, indicesOptions, preserveExisting, reopen, Arrays.hashCode(indices)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index fbae64dcb6d45..8a2eb18b5164f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -233,7 +233,7 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest( // mappings are updated on the node when creating in the shards, this prevents race-conditions since all mapping must be // applied once we took the snapshot and if somebody messes things up and switches the index read/write and adds docs we // miss the mappings for everything is corrupted and hard to debug - .ackTimeout(targetIndex.timeout()) + .ackTimeout(targetIndex.ackTimeout()) .masterNodeTimeout(targetIndex.masterNodeTimeout()) .settings(targetIndex.settings()) .aliases(targetIndex.aliases()) diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index c32ad9cf41a74..5813741bba258 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -14,6 +14,7 @@ import org.elasticsearch.core.TimeValue; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.core.TimeValue.timeValueSeconds; @@ -27,60 +28,102 @@ public abstract class AcknowledgedRequest { - public Plain(StreamInput in) throws IOException { super(in); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java index b26f6faa08818..65892094b4c46 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java @@ -32,7 +32,7 @@ protected AcknowledgedRequestBuilder(ElasticsearchClient client, ActionType { assert recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS : "mapping update consumer only required by local shards recovery"; + AcknowledgedRequest putMappingRequestAcknowledgedRequest = new PutMappingRequest().setConcreteIndex( + shardRouting.index() + ) + .setConcreteIndex(shardRouting.index()) // concrete index - no name clash, it uses uuid + .source(mapping.source().string(), XContentType.JSON); + // concrete index - no name clash, it uses uuid client.execute( featureService.clusterHasFeature(clusterService.state(), SUPPORTS_AUTO_PUT) ? 
TransportAutoPutMappingAction.TYPE : TransportPutMappingAction.TYPE, - new PutMappingRequest().setConcreteIndex(shardRouting.index()) - .setConcreteIndex(shardRouting.index()) // concrete index - no name clash, it uses uuid - .source(mapping.source().string(), XContentType.JSON) - .timeout(TimeValue.MAX_VALUE) - .masterNodeTimeout(TimeValue.MAX_VALUE), + putMappingRequestAcknowledgedRequest.ackTimeout(TimeValue.MAX_VALUE).masterNodeTimeout(TimeValue.MAX_VALUE), new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) ); }, this, clusterStateVersion); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java index 8792393567985..534bb391e9ffe 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCleanupRepositoryAction.java @@ -41,7 +41,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String name = request.param("repository"); CleanupRepositoryRequest cleanupRepositoryRequest = new CleanupRepositoryRequest(name); - cleanupRepositoryRequest.timeout(request.paramAsTime("timeout", cleanupRepositoryRequest.timeout())); + cleanupRepositoryRequest.ackTimeout(request.paramAsTime("timeout", cleanupRepositoryRequest.ackTimeout())); cleanupRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", cleanupRepositoryRequest.masterNodeTimeout())); return channel -> client.admin().cluster().cleanupRepository(cleanupRepositoryRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java index 468cf30c8de54..a42882d1144c2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java @@ -96,7 +96,7 @@ public static ClusterRerouteRequest createRequest(RestRequest request) throws IO ClusterRerouteRequest clusterRerouteRequest = new ClusterRerouteRequest(); clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun())); clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain())); - clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout())); + clusterRerouteRequest.ackTimeout(request.paramAsTime("timeout", clusterRerouteRequest.ackTimeout())); clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed())); clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout())); request.applyContentParser(parser -> PARSER.parse(parser, clusterRerouteRequest, null)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java index 54dbe7f6aa825..cf22e403e1def 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java @@ -44,7 +44,7 @@ public String getName() { @SuppressWarnings("unchecked") public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = new ClusterUpdateSettingsRequest(); - clusterUpdateSettingsRequest.timeout(request.paramAsTime("timeout", clusterUpdateSettingsRequest.timeout())); + clusterUpdateSettingsRequest.ackTimeout(request.paramAsTime("timeout", clusterUpdateSettingsRequest.ackTimeout())); clusterUpdateSettingsRequest.masterNodeTimeout( request.paramAsTime("master_timeout", clusterUpdateSettingsRequest.masterNodeTimeout()) ); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java index 859c21535cc81..3dc979f295530 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java @@ -44,7 +44,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String name = request.param("repository"); DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest(name); - deleteRepositoryRequest.timeout(request.paramAsTime("timeout", deleteRepositoryRequest.timeout())); + deleteRepositoryRequest.ackTimeout(request.paramAsTime("timeout", deleteRepositoryRequest.ackTimeout())); deleteRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteRepositoryRequest.masterNodeTimeout())); return channel -> client.admin() .cluster() diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java index 9f9dd8ee9f97f..ca6a9b5fe1f22 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java @@ -37,7 +37,7 @@ public String getName() { public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); DeleteStoredScriptRequest deleteStoredScriptRequest = new DeleteStoredScriptRequest(id); - deleteStoredScriptRequest.timeout(request.paramAsTime("timeout", deleteStoredScriptRequest.timeout())); + deleteStoredScriptRequest.ackTimeout(request.paramAsTime("timeout", deleteStoredScriptRequest.ackTimeout())); deleteStoredScriptRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteStoredScriptRequest.masterNodeTimeout())); return channel -> client.admin().cluster().deleteStoredScript(deleteStoredScriptRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java index 69445b066ca3c..1fe1f8da2e5c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java @@ 
-51,7 +51,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } putRepositoryRequest.verify(request.paramAsBoolean("verify", true)); putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout())); - putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout())); + putRepositoryRequest.ackTimeout(request.paramAsTime("timeout", putRepositoryRequest.ackTimeout())); return channel -> client.admin() .cluster() .putRepository( diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java index 5631034785d9c..984882edcffaa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java @@ -52,7 +52,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client PutStoredScriptRequest putRequest = new PutStoredScriptRequest(id, context, content, request.getXContentType(), source); putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); - putRequest.timeout(request.paramAsTime("timeout", putRequest.timeout())); + putRequest.ackTimeout(request.paramAsTime("timeout", putRequest.ackTimeout())); return channel -> client.admin().cluster().putStoredScript(putRequest, new RestToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java index c8595778c314f..1eda532439e19 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java @@ -39,7 +39,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String name = request.param("repository"); VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(name); verifyRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", verifyRepositoryRequest.masterNodeTimeout())); - verifyRepositoryRequest.timeout(request.paramAsTime("timeout", verifyRepositoryRequest.timeout())); + verifyRepositoryRequest.ackTimeout(request.paramAsTime("timeout", verifyRepositoryRequest.ackTimeout())); return channel -> client.admin().cluster().verifyRepository(verifyRepositoryRequest, new RestToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java index b8a7179f8cfb7..6fbfaa11b83e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestDeleteDanglingIndexAction.java @@ -40,7 +40,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient request.paramAsBoolean("accept_data_loss", false) ); - deleteRequest.timeout(request.paramAsTime("timeout", deleteRequest.timeout())); + deleteRequest.ackTimeout(request.paramAsTime("timeout", 
deleteRequest.ackTimeout())); deleteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteRequest.masterNodeTimeout())); return channel -> client.execute( diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java index 9fa46fd9b0a3c..b20eac028ba02 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/dangling/RestImportDanglingIndexAction.java @@ -39,7 +39,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient request.paramAsBoolean("accept_data_loss", false) ); - importRequest.timeout(request.paramAsTime("timeout", importRequest.timeout())); + importRequest.ackTimeout(request.paramAsTime("timeout", importRequest.ackTimeout())); importRequest.masterNodeTimeout(request.paramAsTime("master_timeout", importRequest.masterNodeTimeout())); return channel -> client.execute( diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAddIndexBlockAction.java index 96b7bf0100fd7..9498541d5a305 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAddIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAddIndexBlockAction.java @@ -44,7 +44,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC Strings.splitStringByCommaToArray(request.param("index")) ); addIndexBlockRequest.masterNodeTimeout(request.paramAsTime("master_timeout", addIndexBlockRequest.masterNodeTimeout())); - addIndexBlockRequest.timeout(request.paramAsTime("timeout", addIndexBlockRequest.timeout())); + addIndexBlockRequest.ackTimeout(request.paramAsTime("timeout", addIndexBlockRequest.ackTimeout())); addIndexBlockRequest.indicesOptions(IndicesOptions.fromRequest(request, addIndexBlockRequest.indicesOptions())); return channel -> client.admin().indices().addBlock(addIndexBlockRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java index 630b8c9c40509..ea906955785b4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java @@ -48,7 +48,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { CloseIndexRequest closeIndexRequest = new CloseIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); closeIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", closeIndexRequest.masterNodeTimeout())); - closeIndexRequest.timeout(request.paramAsTime("timeout", closeIndexRequest.timeout())); + closeIndexRequest.ackTimeout(request.paramAsTime("timeout", closeIndexRequest.ackTimeout())); closeIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, closeIndexRequest.indicesOptions())); String waitForActiveShards = request.param("wait_for_active_shards"); if ("index-setting".equalsIgnoreCase(waitForActiveShards)) { diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 8a98550a75895..37106059b7b9e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -76,7 +76,7 @@ static CreateIndexRequest prepareRequestV7(RestRequest request) { createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); } - createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout())); + createIndexRequest.ackTimeout(request.paramAsTime("timeout", createIndexRequest.ackTimeout())); createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout())); createIndexRequest.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); return createIndexRequest; @@ -115,7 +115,7 @@ static CreateIndexRequest prepareRequest(RestRequest request) { createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); } - createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout())); + createIndexRequest.ackTimeout(request.paramAsTime("timeout", createIndexRequest.ackTimeout())); createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout())); createIndexRequest.waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java index 3f655591efc86..b39cd6ca0ded1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java @@ -39,7 +39,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); - deleteIndexRequest.timeout(request.paramAsTime("timeout", deleteIndexRequest.timeout())); + deleteIndexRequest.ackTimeout(request.paramAsTime("timeout", deleteIndexRequest.ackTimeout())); deleteIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteIndexRequest.masterNodeTimeout())); deleteIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, deleteIndexRequest.indicesOptions())); return channel -> client.admin().indices().delete(deleteIndexRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java index 3e6c177581d7d..67de902d50e91 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java @@ -40,7 +40,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final String[] aliases = 
Strings.splitStringByCommaToArray(request.param("name")); IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); - indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout())); + indicesAliasesRequest.ackTimeout(request.paramAsTime("timeout", indicesAliasesRequest.ackTimeout())); indicesAliasesRequest.addAliasAction(AliasActions.remove().indices(indices).aliases(aliases)); indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java index ad83aa3603a27..7395f00d733b1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java @@ -98,7 +98,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); - indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout())); + indicesAliasesRequest.ackTimeout(request.paramAsTime("timeout", indicesAliasesRequest.ackTimeout())); indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout())); IndicesAliasesRequest.AliasActions aliasAction = AliasActions.add().indices(indices).alias(alias); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java index c30659128e32e..e603d7647966f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java @@ -39,7 +39,7 @@ public List routes() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout())); - indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout())); + indicesAliasesRequest.ackTimeout(request.paramAsTime("timeout", indicesAliasesRequest.ackTimeout())); try (XContentParser parser = request.contentParser()) { IndicesAliasesRequest.PARSER.parse(parser, indicesAliasesRequest, null); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java index 2f88d5ecb4372..4e00bbb5966b8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java @@ -40,7 +40,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { OpenIndexRequest openIndexRequest = new OpenIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); - openIndexRequest.timeout(request.paramAsTime("timeout", openIndexRequest.timeout())); + 
openIndexRequest.ackTimeout(request.paramAsTime("timeout", openIndexRequest.ackTimeout())); openIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", openIndexRequest.masterNodeTimeout())); openIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, openIndexRequest.indicesOptions())); String waitForActiveShards = request.param("wait_for_active_shards"); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index ca7d2cea4133d..5d4d913767fe4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -90,7 +90,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putMappingRequest.source(sourceAsMap); } - putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout())); + putMappingRequest.ackTimeout(request.paramAsTime("timeout", putMappingRequest.ackTimeout())); putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout())); putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions())); putMappingRequest.writeIndexOnly(request.paramAsBoolean("write_index_only", false)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index 12c3a28d79781..c72508a9bf646 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -49,7 +49,7 @@ public final RestChannelConsumer prepareRequest(final RestRequest request, final final ResizeRequest resizeRequest = new ResizeRequest(request.param("target"), request.param("index")); resizeRequest.setResizeType(getResizeType()); request.applyContentParser(resizeRequest::fromXContent); - resizeRequest.timeout(request.paramAsTime("timeout", resizeRequest.timeout())); + resizeRequest.ackTimeout(request.paramAsTime("timeout", resizeRequest.ackTimeout())); resizeRequest.masterNodeTimeout(request.paramAsTime("master_timeout", resizeRequest.masterNodeTimeout())); resizeRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); return channel -> client.admin().indices().resizeIndex(resizeRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 1718d9af7e5c8..59c37c2c015a6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -51,7 +51,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser)); rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.lazy(request.paramAsBoolean("lazy", false)); - rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); + 
rolloverIndexRequest.ackTimeout(request.paramAsTime("timeout", rolloverIndexRequest.ackTimeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); if (DataStream.isFailureStoreFeatureFlagEnabled()) { boolean failureStore = request.paramAsBoolean("target_failure_store", false); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java index 779cb229ca48b..222a22e5da3e3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java @@ -42,7 +42,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices); - updateSettingsRequest.timeout(request.paramAsTime("timeout", updateSettingsRequest.timeout())); + updateSettingsRequest.ackTimeout(request.paramAsTime("timeout", updateSettingsRequest.ackTimeout())); updateSettingsRequest.setPreserveExisting(request.paramAsBoolean("preserve_existing", updateSettingsRequest.isPreserveExisting())); updateSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", updateSettingsRequest.masterNodeTimeout())); updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index 1258982f1c5fc..3049b9096004e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -37,7 +37,7 @@ public String getName() { public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { DeletePipelineRequest request = new DeletePipelineRequest(restRequest.param("id")); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); return channel -> client.admin().cluster().deletePipeline(request, new RestToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 4ae94eb6e9d34..30b3448a04883 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -55,7 +55,7 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl Tuple sourceTuple = restRequest.contentOrSourceParam(); PutPipelineRequest request = new PutPipelineRequest(restRequest.param("id"), sourceTuple.v2(), sourceTuple.v1(), ifVersion); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); - request.timeout(restRequest.paramAsTime("timeout", 
request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); return channel -> client.admin().cluster().putPipeline(request, new RestToXContentListener<>(channel)); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index d3a81df8f72d1..e9e2122c237c6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -99,7 +99,9 @@ public void testEqualsAndHashCode() { ClusterRerouteRequest copy = new ClusterRerouteRequest().add( request.getCommands().commands().toArray(new AllocationCommand[0]) ); - copy.dryRun(request.dryRun()).explain(request.explain()).timeout(request.timeout()).setRetryFailed(request.isRetryFailed()); + AcknowledgedRequest<ClusterRerouteRequest> clusterRerouteRequestAcknowledgedRequest = copy.dryRun(request.dryRun()) .explain(request.explain()); clusterRerouteRequestAcknowledgedRequest.ackTimeout(request.ackTimeout()).setRetryFailed(request.isRetryFailed()); copy.masterNodeTimeout(request.masterNodeTimeout()); assertEquals(request, copy); assertEquals(copy, request); // Commutative @@ -122,10 +124,10 @@ public void testEqualsAndHashCode() { assertEquals(request.hashCode(), copy.hashCode()); // Changing timeout makes requests not equal - copy.timeout(timeValueMillis(request.timeout().millis() + 1)); + copy.ackTimeout(timeValueMillis(request.ackTimeout().millis() + 1)); assertNotEquals(request, copy); assertNotEquals(request.hashCode(), copy.hashCode()); - copy.timeout(request.timeout()); + copy.ackTimeout(request.ackTimeout()); assertEquals(request, copy); assertEquals(request.hashCode(), copy.hashCode()); @@ -193,8 +195,8 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep builder.field("dry_run", original.dryRun()); } params.put("explain", Boolean.toString(original.explain())); - if (false == original.timeout().equals(AcknowledgedRequest.DEFAULT_ACK_TIMEOUT) || randomBoolean()) { - params.put("timeout", original.timeout().toString()); + if (false == original.ackTimeout().equals(AcknowledgedRequest.DEFAULT_ACK_TIMEOUT) || randomBoolean()) { + params.put("timeout", original.ackTimeout().toString()); } if (original.isRetryFailed() || randomBoolean()) { params.put("retry_failed", Boolean.toString(original.isRetryFailed())); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java index d8bf60bcd76a3..a6d380bc7683c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -56,7 +56,7 @@ public void testSerializeRequest() throws IOException { req.dryRun(randomBoolean()); req.explain(randomBoolean()); req.add(new AllocateEmptyPrimaryAllocationCommand("foo", 1, "bar", randomBoolean())); - req.timeout(TimeValue.timeValueMillis(randomIntBetween(0, 100))); + req.ackTimeout(TimeValue.timeValueMillis(randomIntBetween(0, 100))); BytesStreamOutput out = new BytesStreamOutput(); req.writeTo(out); BytesReference bytes = out.bytes(); @@ -67,7 +67,7 @@
assertEquals(req.isRetryFailed(), deserializedReq.isRetryFailed()); assertEquals(req.dryRun(), deserializedReq.dryRun()); assertEquals(req.explain(), deserializedReq.explain()); - assertEquals(req.timeout(), deserializedReq.timeout()); + assertEquals(req.ackTimeout(), deserializedReq.ackTimeout()); assertEquals(1, deserializedReq.getCommands().commands().size()); // allocation commands have their own tests assertEquals(req.getCommands().commands().size(), deserializedReq.getCommands().commands().size()); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestTests.java index 3e7e47908b207..f1cd54aa036ee 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestTests.java @@ -45,7 +45,7 @@ private IndicesAliasesRequest createTestInstance() { int numItems = randomIntBetween(0, 32); IndicesAliasesRequest request = new IndicesAliasesRequest(); if (randomBoolean()) { - request.timeout(randomTimeValue()); + request.ackTimeout(randomTimeValue()); } if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java index dc2f5f019e28b..b3caf93fbcddf 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestTests.java @@ -29,7 +29,7 @@ public void testSerialization() throws Exception { try (StreamInput in = out.bytes().streamInput()) { deserializedRequest = new CloseIndexRequest(in); } - assertEquals(request.timeout(), deserializedRequest.timeout()); + assertEquals(request.ackTimeout(), deserializedRequest.ackTimeout()); assertEquals(request.masterNodeTimeout(), deserializedRequest.masterNodeTimeout()); assertEquals(request.indicesOptions(), deserializedRequest.indicesOptions()); assertEquals(request.getParentTask(), deserializedRequest.getParentTask()); @@ -49,7 +49,7 @@ public void testBwcSerialization() throws Exception { in.setTransportVersion(out.getTransportVersion()); assertEquals(request.getParentTask(), TaskId.readFromStream(in)); assertEquals(request.masterNodeTimeout(), in.readTimeValue()); - assertEquals(request.timeout(), in.readTimeValue()); + assertEquals(request.ackTimeout(), in.readTimeValue()); assertArrayEquals(request.indices(), in.readStringArray()); final IndicesOptions indicesOptions = IndicesOptions.readIndicesOptions(in); // indices options are not equivalent when sent to an older version and re-read due @@ -75,7 +75,7 @@ public void testBwcSerialization() throws Exception { out.setTransportVersion(version); sample.getParentTask().writeTo(out); out.writeTimeValue(sample.masterNodeTimeout()); - out.writeTimeValue(sample.timeout()); + out.writeTimeValue(sample.ackTimeout()); out.writeStringArray(sample.indices()); sample.indicesOptions().writeIndicesOptions(out); if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { @@ -89,7 +89,7 @@ public void testBwcSerialization() throws Exception { } assertEquals(sample.getParentTask(), deserializedRequest.getParentTask()); assertEquals(sample.masterNodeTimeout(), deserializedRequest.masterNodeTimeout()); - assertEquals(sample.timeout(), 
deserializedRequest.timeout()); + assertEquals(sample.ackTimeout(), deserializedRequest.ackTimeout()); assertArrayEquals(sample.indices(), deserializedRequest.indices()); // indices options are not equivalent when sent to an older version and re-read due // to the addition of hidden indices as expand to hidden indices is always true when @@ -116,7 +116,7 @@ private CloseIndexRequest randomRequest() { ); } if (randomBoolean()) { - request.timeout(randomPositiveTimeValue()); + request.ackTimeout(randomPositiveTimeValue()); } if (randomBoolean()) { request.masterNodeTimeout(randomPositiveTimeValue()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java index 9ff323028e2c5..e0ef737493ea1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java @@ -33,7 +33,7 @@ protected UpdateSettingsRequest mutateInstance(UpdateSettingsRequest request) { List<Runnable> mutators = new ArrayList<>(); Supplier<TimeValue> timeValueSupplier = () -> TimeValue.parseTimeValue(ESTestCase.randomTimeValue(), "_setting"); mutators.add(() -> mutation.masterNodeTimeout(randomValueOtherThan(request.masterNodeTimeout(), timeValueSupplier))); - mutators.add(() -> mutation.timeout(randomValueOtherThan(request.timeout(), timeValueSupplier))); + mutators.add(() -> mutation.ackTimeout(randomValueOtherThan(request.ackTimeout(), timeValueSupplier))); mutators.add(() -> mutation.settings(mutateSettings(request.settings()))); mutators.add(() -> mutation.indices(mutateIndices(request.indices()))); mutators.add( @@ -65,7 +65,7 @@ public static UpdateSettingsRequest createTestItem() { ?
new UpdateSettingsRequest(randomSettings(0, 2)) : new UpdateSettingsRequest(randomSettings(0, 2), randomIndicesNames(0, 2)); request.masterNodeTimeout(randomTimeValue()); - request.timeout(randomTimeValue()); + request.ackTimeout(randomTimeValue()); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); request.setPreserveExisting(randomBoolean()); request.reopen(randomBoolean()); @@ -75,7 +75,7 @@ public static UpdateSettingsRequest createTestItem() { private static UpdateSettingsRequest copyRequest(UpdateSettingsRequest request) { UpdateSettingsRequest result = new UpdateSettingsRequest(request.settings(), request.indices()); result.masterNodeTimeout(request.masterNodeTimeout()); - result.timeout(request.timeout()); + result.ackTimeout(request.ackTimeout()); result.indicesOptions(request.indicesOptions()); result.setPreserveExisting(request.isPreserveExisting()); return result; diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/PrimaryFollowerAllocationIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/PrimaryFollowerAllocationIT.java index bab0bbdff02e2..50750f629b993 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/PrimaryFollowerAllocationIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/PrimaryFollowerAllocationIT.java @@ -62,7 +62,7 @@ public void testDoNotAllocateFollowerPrimaryToNodesWithoutRemoteClusterClientRol .build() ); putFollowRequest.waitForActiveShards(ActiveShardCount.ONE); - putFollowRequest.timeout(TimeValue.timeValueSeconds(2)); + putFollowRequest.ackTimeout(TimeValue.timeValueSeconds(2)); final PutFollowAction.Response response = followerClient().execute(PutFollowAction.INSTANCE, putFollowRequest).get(); assertFalse(response.isFollowIndexShardsAcked()); assertFalse(response.isIndexFollowingStarted()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 770e90e74c450..0a0cade089fab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -306,7 +306,7 @@ private void initiateFollowing( clusterService, new String[] { request.getFollowerIndex() }, request.waitForActiveShards(), - request.timeout(), + request.ackTimeout(), l.map(result -> new PutFollowAction.Response(true, result, r.isAcknowledged())) ) ) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index cc7dace36e08e..044866ad07cb5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -36,7 +36,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { AcknowledgedRequest.Plain deleteLicenseRequest = new AcknowledgedRequest.Plain(); - deleteLicenseRequest.timeout(request.paramAsTime("timeout", deleteLicenseRequest.timeout())); + deleteLicenseRequest.ackTimeout(request.paramAsTime("timeout", 
deleteLicenseRequest.ackTimeout())); deleteLicenseRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteLicenseRequest.masterNodeTimeout())); return channel -> client.admin() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java index 38a7ea7f7da29..b3e436d83165d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPostStartBasicLicense.java @@ -33,7 +33,7 @@ public List<Route> routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { PostStartBasicRequest startBasicRequest = new PostStartBasicRequest(); startBasicRequest.acknowledge(request.paramAsBoolean("acknowledge", false)); - startBasicRequest.timeout(request.paramAsTime("timeout", startBasicRequest.timeout())); + startBasicRequest.ackTimeout(request.paramAsTime("timeout", startBasicRequest.ackTimeout())); startBasicRequest.masterNodeTimeout(request.paramAsTime("master_timeout", startBasicRequest.masterNodeTimeout())); return channel -> client.execute( PostStartBasicAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java index 2990f7d2a42b1..413c0d5ba0732 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestPutLicenseAction.java @@ -45,7 +45,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC PutLicenseRequest putLicenseRequest = new PutLicenseRequest(); putLicenseRequest.license(request.content(), request.getXContentType()); putLicenseRequest.acknowledge(request.paramAsBoolean("acknowledge", false)); - putLicenseRequest.timeout(request.paramAsTime("timeout", putLicenseRequest.timeout())); + putLicenseRequest.ackTimeout(request.paramAsTime("timeout", putLicenseRequest.ackTimeout())); putLicenseRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putLicenseRequest.masterNodeTimeout())); if (License.LicenseType.isBasic(putLicenseRequest.license().type())) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 61e76d2131057..48323692b7915 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -48,7 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request() { - timeout(DEFAULT_TIMEOUT); + ackTimeout(DEFAULT_TIMEOUT); } public Request(String id) { @@ -78,7 +78,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteDataFrameAnalyticsAction.Request request = (DeleteDataFrameAnalyticsAction.Request) o; - return Objects.equals(id, request.id) && force == request.force && Objects.equals(timeout, request.timeout); + return Objects.equals(id, request.id) && force == request.force && Objects.equals(ackTimeout(), request.ackTimeout()); } @Override @@ -90,7 +90,7 @@
public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { - return Objects.hash(id, force, timeout); + return Objects.hash(id, force, ackTimeout()); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java index a7a99836cdaee..143e07fb750f8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java @@ -52,7 +52,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public int hashCode() { - return Objects.hash(timeout); + return Objects.hash(ackTimeout()); } @Override @@ -64,7 +64,7 @@ public boolean equals(Object obj) { return false; } GetMlAutoscalingStats.Request other = (GetMlAutoscalingStats.Request) obj; - return Objects.equals(timeout, other.timeout); + return Objects.equals(ackTimeout(), other.ackTimeout()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index b598d398025e0..bb113a9b3e1e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -46,7 +46,7 @@ public static class Request extends AcknowledgedRequest<Request> implements ToXC static { PARSER.declareString(Request::setDeploymentId, MODEL_ID); PARSER.declareInt(Request::setNumberOfAllocations, NUMBER_OF_ALLOCATIONS); - PARSER.declareString((r, val) -> r.timeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); + PARSER.declareString((r, val) -> r.ackTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); } public static Request parseRequest(String deploymentId, XContentParser parser) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java index ef61187757445..3623c659216d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java @@ -76,7 +76,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), id, force, deleteDestIndex); + return Objects.hash(ackTimeout(), id, force, deleteDestIndex); } @Override @@ -93,7 +93,7 @@ public boolean equals(Object obj) { return Objects.equals(id, other.id) && force == other.force && deleteDestIndex == other.deleteDestIndex - && timeout().equals(other.timeout()); + && ackTimeout().equals(other.ackTimeout()); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java index 5e28e74c2e063..9d335b2ccdb34 100644 ---
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java @@ -136,7 +136,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), config, deferValidation); + return Objects.hash(ackTimeout(), config, deferValidation); } @Override @@ -152,7 +152,7 @@ public boolean equals(Object obj) { // the base class does not implement equals, therefore we need to check timeout ourselves return Objects.equals(config, other.config) && this.deferValidation == other.deferValidation - && timeout().equals(other.timeout()); + && ackTimeout().equals(other.ackTimeout()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java index 9d77ffdc0c218..609dd33cbfa9e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java @@ -63,7 +63,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), id, force); + return Objects.hash(ackTimeout(), id, force); } @Override @@ -76,7 +76,7 @@ public boolean equals(Object obj) { } Request other = (Request) obj; // the base class does not implement equals, therefore we need to check timeout ourselves - return Objects.equals(id, other.id) && force == other.force && timeout().equals(other.timeout()); + return Objects.equals(id, other.id) && force == other.force && ackTimeout().equals(other.ackTimeout()); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java index 268098c092b0c..3ecadd1b708cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java @@ -74,7 +74,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), id, from); + return Objects.hash(ackTimeout(), id, from); } @Override @@ -87,7 +87,7 @@ public boolean equals(Object obj) { } Request other = (Request) obj; // the base class does not implement equals, therefore we need to check timeout ourselves - return Objects.equals(id, other.id) && Objects.equals(from, other.from) && timeout().equals(other.timeout()); + return Objects.equals(id, other.id) && Objects.equals(from, other.from) && ackTimeout().equals(other.ackTimeout()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java index b7dea916accbc..3a36d9163e0c0 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java @@ -57,7 +57,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), dryRun); + return Objects.hash(ackTimeout(), dryRun); } @Override @@ -71,7 +71,7 @@ public boolean equals(Object obj) { Request other = (Request) obj; // the base class does not implement equals, therefore we need to check timeout ourselves - return this.dryRun == other.dryRun && timeout().equals(other.timeout()); + return this.dryRun == other.dryRun && ackTimeout().equals(other.ackTimeout()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java index 10e086335825f..de6435ad31dbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java @@ -86,13 +86,13 @@ public boolean equals(Object obj) { Request that = (Request) obj; // the base class does not implement equals, therefore we need to check timeout ourselves - return Objects.equals(config, that.config) && deferValidation == that.deferValidation && timeout().equals(that.timeout()); + return Objects.equals(config, that.config) && deferValidation == that.deferValidation && ackTimeout().equals(that.ackTimeout()); } @Override public int hashCode() { // the base class does not implement hashCode, therefore we need to hash timeout ourselves - return Objects.hash(timeout(), config, deferValidation); + return Objects.hash(ackTimeout(), config, deferValidation); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsActionRequestTests.java index 306d5ff67c26c..bb78d27fb3054 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsActionRequestTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -21,7 +22,7 @@ protected Request createTestInstance() { Request request = new Request(randomAlphaOfLength(10)); request.setForce(randomBoolean()); if (randomBoolean()) { - request.timeout(TimeValue.parseTimeValue(randomTimeValue(), "test")); + request.ackTimeout(TimeValue.parseTimeValue(randomTimeValue(), "test")); } return request; } @@ -37,6 +38,7 @@ protected Writeable.Reader<Request> instanceReader() { } public void testDefaultTimeout() { - assertThat(createTestInstance().timeout(), is(notNullValue())); + AcknowledgedRequest<Request> requestAcknowledgedRequest = createTestInstance(); + assertThat(requestAcknowledgedRequest.ackTimeout(), is(notNullValue())); } } diff
--git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java index 97c8f2bfc3473..23e3116d67465 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java @@ -28,6 +28,6 @@ protected Request createTestInstance() { @Override protected Request mutateInstance(Request instance) throws IOException { - return new Request(TimeValue.timeValueMillis(instance.timeout().millis() + randomIntBetween(1, 1000))); + return new Request(TimeValue.timeValueMillis(instance.ackTimeout().millis() + randomIntBetween(1, 1000))); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsActionRequestTests.java index f5c5be73f2b7b..e7146c5d89592 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsActionRequestTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; @@ -121,6 +122,7 @@ public void testValidate_GivenRequestWithIncludedAnalyzedFieldThatIsIncludedInSo } public void testDefaultTimeout() { - assertThat(createTestInstance().timeout(), is(notNullValue())); + AcknowledgedRequest<Request> requestAcknowledgedRequest = createTestInstance(); + assertThat(requestAcknowledgedRequest.ackTimeout(), is(notNullValue())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsActionRequestTests.java index 5597bf72b9658..9c4b17a30fbe2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsActionRequestTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -64,6 +65,7 @@ public void testParseRequest() throws IOException { } public void testDefaultTimeout() { - assertThat(createTestInstance().timeout(), is(notNullValue())); + AcknowledgedRequest<Request> requestAcknowledgedRequest = createTestInstance(); + assertThat(requestAcknowledgedRequest.ackTimeout(), is(notNullValue())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformActionRequestTests.java index dfbd5009932e1..008c7c4447cb7 100644 ---
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformActionRequestTests.java @@ -33,7 +33,7 @@ protected Request mutateInstance(Request instance) { String id = instance.getId(); boolean force = instance.isForce(); boolean deleteDestIndex = instance.isDeleteDestIndex(); - TimeValue timeout = instance.timeout(); + TimeValue timeout = instance.ackTimeout(); switch (between(0, 3)) { case 0 -> id += randomAlphaOfLengthBetween(1, 5); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PutTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PutTransformActionRequestTests.java index 51f070a19dd6c..75fe307c2007b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PutTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PutTransformActionRequestTests.java @@ -37,7 +37,7 @@ protected Request createTestInstance() { protected Request mutateInstance(Request instance) { TransformConfig config = instance.getConfig(); boolean deferValidation = instance.isDeferValidation(); - TimeValue timeout = instance.timeout(); + TimeValue timeout = instance.ackTimeout(); switch (between(0, 2)) { case 0 -> config = new TransformConfig.Builder(config).setId(config.getId() + randomAlphaOfLengthBetween(1, 5)).build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/StartTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/StartTransformActionRequestTests.java index 85cd9b34827b5..86cf4aaf17e2e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/StartTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/StartTransformActionRequestTests.java @@ -34,7 +34,7 @@ protected Writeable.Reader<Request> instanceReader() { protected Request mutateInstance(Request instance) { String id = instance.getId(); Instant from = instance.from(); - TimeValue timeout = instance.timeout(); + TimeValue timeout = instance.ackTimeout(); switch (between(0, 2)) { case 0 -> id += randomAlphaOfLengthBetween(1, 5); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsActionRequestTests.java index 27d36f4e0f0d8..25b50b163779a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsActionRequestTests.java @@ -27,7 +27,7 @@ protected Request createTestInstance() { @Override protected Request mutateInstance(Request instance) { boolean dryRun = instance.isDryRun(); - TimeValue timeout = instance.timeout(); + TimeValue timeout = instance.ackTimeout(); switch (between(0, 1)) { case 0 -> dryRun ^= true; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformActionRequestTests.java
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformActionRequestTests.java index c780d2bfc8a73..9d12e1537abde 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformActionRequestTests.java @@ -30,7 +30,7 @@ protected Reader<Request> instanceReader() { protected Request mutateInstance(Request instance) { TransformConfig config = instance.getConfig(); boolean deferValidation = instance.isDeferValidation(); - TimeValue timeout = instance.timeout(); + TimeValue timeout = instance.ackTimeout(); switch (between(0, 2)) { case 0 -> config = new TransformConfig.Builder(config).setId(config.getId() + randomAlphaOfLengthBetween(1, 5)).build(); diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java index 535fb4c422870..58e1eb28952b0 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java @@ -115,7 +115,7 @@ protected void masterOperation(Task task, FreezeRequest request, ClusterState st } final CloseIndexClusterStateUpdateRequest closeRequest = new CloseIndexClusterStateUpdateRequest(task.getId()).ackTimeout( - request.timeout() + request.ackTimeout() ).masterNodeTimeout(request.masterNodeTimeout()).indices(concreteIndices); indexStateService.closeIndices(closeRequest, new ActionListener<>() { @@ -145,7 +145,7 @@ private void toggleFrozenSettings( submitUnbatchedTask( "toggle-frozen-settings", new AckedClusterStateUpdateTask(Priority.URGENT, request, listener.delegateFailure((delegate, acknowledgedResponse) -> { - OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest().ackTimeout(request.timeout()) + OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest().ackTimeout(request.ackTimeout()) .masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java index 69726962bf6d2..29c008d4c3128 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/rest/action/RestFreezeIndexAction.java @@ -62,7 +62,7 @@ public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBui } FreezeRequest freezeRequest = new FreezeRequest(Strings.splitStringByCommaToArray(request.param("index"))); - freezeRequest.timeout(request.paramAsTime("timeout", freezeRequest.timeout())); + freezeRequest.ackTimeout(request.paramAsTime("timeout", freezeRequest.ackTimeout())); freezeRequest.masterNodeTimeout(request.paramAsTime("master_timeout", freezeRequest.masterNodeTimeout())); freezeRequest.indicesOptions(IndicesOptions.fromRequest(request, freezeRequest.indicesOptions())); String waitForActiveShards =
request.param("wait_for_active_shards"); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java index 60b2202930338..9cd598f85c00f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestDeleteLifecycleAction.java @@ -33,7 +33,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String lifecycleName = restRequest.param("name"); DeleteLifecycleAction.Request deleteLifecycleRequest = new DeleteLifecycleAction.Request(lifecycleName); - deleteLifecycleRequest.timeout(restRequest.paramAsTime("timeout", deleteLifecycleRequest.timeout())); + deleteLifecycleRequest.ackTimeout(restRequest.paramAsTime("timeout", deleteLifecycleRequest.ackTimeout())); deleteLifecycleRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", deleteLifecycleRequest.masterNodeTimeout())); return channel -> client.execute(DeleteLifecycleAction.INSTANCE, deleteLifecycleRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java index f5221ba980440..9631558649ed0 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetLifecycleAction.java @@ -35,7 +35,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] lifecycleNames = Strings.splitStringByCommaToArray(restRequest.param("name")); GetLifecycleAction.Request getLifecycleRequest = new GetLifecycleAction.Request(lifecycleNames); - getLifecycleRequest.timeout(restRequest.paramAsTime("timeout", getLifecycleRequest.timeout())); + getLifecycleRequest.ackTimeout(restRequest.paramAsTime("timeout", getLifecycleRequest.ackTimeout())); getLifecycleRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", getLifecycleRequest.masterNodeTimeout())); return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java index 186c7bd07961d..1721510fe7f87 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestGetStatusAction.java @@ -33,7 +33,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { AcknowledgedRequest.Plain request = new AcknowledgedRequest.Plain(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(GetStatusAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java index 6740d7a2cd4b4..6d9300f2c6088 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java @@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient try (XContentParser parser = restRequest.contentParser()) { request = TransportMoveToStepAction.Request.parseRequest(index, parser); } - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(ILMActions.MOVE_TO_STEP, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java index 280f3a609e604..6240ba1a97574 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestPutLifecycleAction.java @@ -37,7 +37,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String lifecycleName = restRequest.param("name"); try (XContentParser parser = restRequest.contentParser()) { PutLifecycleRequest putLifecycleRequest = PutLifecycleRequest.parseRequest(lifecycleName, parser); - putLifecycleRequest.timeout(restRequest.paramAsTime("timeout", putLifecycleRequest.timeout())); + putLifecycleRequest.ackTimeout(restRequest.paramAsTime("timeout", putLifecycleRequest.ackTimeout())); putLifecycleRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", putLifecycleRequest.masterNodeTimeout())); return channel -> client.execute(ILMActions.PUT, putLifecycleRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java index bfd186ab5b6ba..87c6fd908ad0e 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestRetryAction.java @@ -36,7 +36,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String[] indices = Strings.splitStringByCommaToArray(restRequest.param("index")); TransportRetryAction.Request request = new TransportRetryAction.Request(indices); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.indices(indices); request.indicesOptions(IndicesOptions.fromRequest(restRequest, IndicesOptions.strictExpandOpen())); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java index a130f255f3dfb..7be09625cc799 
100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStartILMAction.java @@ -33,7 +33,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { StartILMRequest request = new StartILMRequest(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(ILMActions.START, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java index e2bb6c20ab6ec..cb71a7ecb10b9 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestStopAction.java @@ -33,7 +33,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { StopILMRequest request = new StopILMRequest(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(ILMActions.STOP, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java index efd65b5ac3282..24c1623f7cc02 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java @@ -125,7 +125,7 @@ private static void stopJob( // still used from the running task which results in logging errors. 
StopDataFrameAnalyticsAction.Request stopRequest = new StopDataFrameAnalyticsAction.Request(request.getId()); - stopRequest.setTimeout(request.timeout()); + stopRequest.setTimeout(request.ackTimeout()); ActionListener<StopDataFrameAnalyticsAction.Response> normalStopListener = ActionListener.wrap( listener::onResponse, @@ -169,7 +169,7 @@ private void normalDelete( configProvider.get(id, listener.delegateFailureAndWrap((l, config) -> { DataFrameAnalyticsDeleter deleter = new DataFrameAnalyticsDeleter(parentTaskClient, auditor); - deleter.deleteAllDocuments(config, request.timeout(), l); + deleter.deleteAllDocuments(config, request.ackTimeout(), l); })); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 104c92fe5dfd7..9fcfc2472bf39 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -322,7 +322,7 @@ private void deleteDatafeedIfNecessary(DeleteJobAction.Request deleteJobRequest, } DeleteDatafeedAction.Request deleteDatafeedRequest = new DeleteDatafeedAction.Request(datafeedIds.iterator().next()); deleteDatafeedRequest.setForce(deleteJobRequest.isForce()); - deleteDatafeedRequest.timeout(deleteJobRequest.timeout()); + deleteDatafeedRequest.ackTimeout(deleteJobRequest.ackTimeout()); ClientHelper.executeAsyncWithOrigin( client, ClientHelper.ML_ORIGIN, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index bcf3c1f58cfa9..7d8567eb32f40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -111,7 +111,12 @@ protected void masterOperation( logger.debug(() -> format("[%s] Request to delete trained model%s", request.getId(), request.isForce() ?
" (force)" : "")); String id = request.getId(); - cancelDownloadTask(client, id, listener.delegateFailureAndWrap((l, ignored) -> deleteModel(request, state, l)), request.timeout()); + cancelDownloadTask( + client, + id, + listener.delegateFailureAndWrap((l, ignored) -> deleteModel(request, state, l)), + request.ackTimeout() + ); } // package-private for testing diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java index 3223a7c7863f3..bf5b6dfbc53e7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java @@ -103,11 +103,11 @@ protected void masterOperation( .clear() .setOs(true) .setJvm(true) - .setTimeout(request.timeout()) + .setTimeout(request.ackTimeout()) .execute(delegate.delegateFailureAndWrap((delegate2, nodesStatsResponse) -> { TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) - ).timeout(request.timeout()); + ).timeout(request.ackTimeout()); parentTaskClient.execute( TrainedModelCacheInfoAction.INSTANCE, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index a272aea59cbc4..87fc956c224cc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -329,7 +329,7 @@ protected void masterOperation( request.isWaitForCompletion(), finalResponseListener, handlePackageAndTagsListener, - request.timeout() + request.ackTimeout() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java index d7071e4f973cc..343bab2f55bba 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java @@ -156,7 +156,7 @@ private void waitExistingResetTaskToComplete( GetTaskRequest getTaskRequest = new GetTaskRequest(); getTaskRequest.setTaskId(existingTaskId); getTaskRequest.setWaitForCompletion(true); - getTaskRequest.setTimeout(request.timeout()); + getTaskRequest.setTimeout(request.ackTimeout()); executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetTaskAction.TYPE, getTaskRequest, ActionListener.wrap(getTaskResponse -> { TaskResult taskResult = getTaskResponse.getTask(); if (taskResult.isCompleted()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index 9d5abbfc06f65..0f9a416121182 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -210,7 +210,7 @@ private void isBlocked(Job job, 
RevertModelSnapshotAction.Request request, Actio // to give a chance to this request to be executed without returning an error. // This is particularly useful when a relocating job is calling revert. getTaskRequest.setWaitForCompletion(request.isForce()); - getTaskRequest.setTimeout(request.timeout()); + getTaskRequest.setTimeout(request.ackTimeout()); executeAsyncWithOrigin( client, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java index d9aeb8004cc7d..076573bb61b90 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetUpgradeModeAction.java @@ -154,7 +154,7 @@ protected void masterOperation( // There is a chance that we failed un-allocating a task due to allocation_id being changed // This call will timeout in that case and return an error .setWaitForCompletion(true) - .setTimeout(request.timeout()) + .setTimeout(request.ackTimeout()) .execute(ActionListener.wrap(r -> { try { // Handle potential node timeouts, @@ -228,7 +228,7 @@ protected void masterOperation( persistentTasksCustomMetadata -> persistentTasksCustomMetadata.tasks() .stream() .noneMatch(t -> ML_TASK_NAMES.contains(t.getTaskName()) && t.getAssignment().equals(AWAITING_UPGRADE)), - request.timeout(), + request.ackTimeout(), ActionListener.wrap(r -> { logger.info("Done waiting for tasks to be out of AWAITING_UPGRADE"); wrappedListener.onResponse(AcknowledgedResponse.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index c50e744bde96b..89180cba77dfd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -508,7 +508,7 @@ public void tryAction(ActionListener listener) { ResetJobAction.Request request = new ResetJobAction.Request(jobTask.getJobId()); request.setSkipJobStateValidation(true); request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + request.ackTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); executeAsyncWithOrigin( client, ML_ORIGIN, @@ -525,7 +525,7 @@ public void tryAction(ActionListener listener) { request.setForce(true); request.setDeleteInterveningResults(true); request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); - request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); + request.ackTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT); executeAsyncWithOrigin( client, ML_ORIGIN, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java index 9e3e870688814..d58e06a35dcaa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java @@ -48,7 +48,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } MlMemoryAction.Request request = new MlMemoryAction.Request(nodeId); 
request.masterNodeTimeout(restRequest.paramAsTime(MASTER_TIMEOUT, request.masterNodeTimeout())); - request.timeout(restRequest.paramAsTime(TIMEOUT, request.timeout())); + request.ackTimeout(restRequest.paramAsTime(TIMEOUT, request.ackTimeout())); return channel -> client.execute(MlMemoryAction.INSTANCE, request, new NodesResponseRestListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java index f6ec5e75b7821..f60b8581e3769 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java @@ -42,7 +42,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { SetUpgradeModeAction.Request request = new SetUpgradeModeAction.Request(restRequest.paramAsBoolean("enabled", false)); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(SetUpgradeModeAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java index 2bf37d3722df7..d9ea3b006b7e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestDeleteDatafeedAction.java @@ -48,7 +48,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient if (restRequest.hasParam(DeleteDatafeedAction.Request.FORCE.getPreferredName())) { request.setForce(restRequest.paramAsBoolean(CloseJobAction.Request.FORCE.getPreferredName(), request.isForce())); } - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(DeleteDatafeedAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java index 64981805717a1..c216afef89fdb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java @@ -51,7 +51,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient try (XContentParser parser = restRequest.contentParser()) { putDatafeedRequest = PutDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); } - putDatafeedRequest.timeout(restRequest.paramAsTime("timeout", putDatafeedRequest.timeout())); + putDatafeedRequest.ackTimeout(restRequest.paramAsTime("timeout", putDatafeedRequest.ackTimeout())); 
putDatafeedRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", putDatafeedRequest.masterNodeTimeout())); return channel -> client.execute(PutDatafeedAction.INSTANCE, putDatafeedRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java index 97e1514441441..f51f37715cdc4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java @@ -57,7 +57,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient try (XContentParser parser = restRequest.contentParser()) { updateDatafeedRequest = UpdateDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); } - updateDatafeedRequest.timeout(restRequest.paramAsTime("timeout", updateDatafeedRequest.timeout())); + updateDatafeedRequest.ackTimeout(restRequest.paramAsTime("timeout", updateDatafeedRequest.ackTimeout())); updateDatafeedRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", updateDatafeedRequest.masterNodeTimeout())); return channel -> client.execute(UpdateDatafeedAction.INSTANCE, updateDatafeedRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java index 3fdbe2a959796..bd66619a1238e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestDeleteDataFrameAnalyticsAction.java @@ -39,7 +39,9 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(DataFrameAnalyticsConfig.ID.getPreferredName()); DeleteDataFrameAnalyticsAction.Request request = new DeleteDataFrameAnalyticsAction.Request(id); request.setForce(restRequest.paramAsBoolean(DeleteDataFrameAnalyticsAction.Request.FORCE.getPreferredName(), request.isForce())); - request.timeout(restRequest.paramAsTime(DeleteDataFrameAnalyticsAction.Request.TIMEOUT.getPreferredName(), request.timeout())); + request.ackTimeout( + restRequest.paramAsTime(DeleteDataFrameAnalyticsAction.Request.TIMEOUT.getPreferredName(), request.ackTimeout()) + ); return channel -> client.execute(DeleteDataFrameAnalyticsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java index 52a3d83eeb11a..fe96ce53d7b91 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java @@ -52,7 +52,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient try (XContentParser parser = restRequest.contentParser()) { updateRequest = UpdateDataFrameAnalyticsAction.Request.parseRequest(id, parser); 
} - updateRequest.timeout(restRequest.paramAsTime("timeout", updateRequest.timeout())); + updateRequest.ackTimeout(restRequest.paramAsTime("timeout", updateRequest.ackTimeout())); return channel -> client.execute(UpdateDataFrameAnalyticsAction.INSTANCE, updateRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java index 896b1dfdb6df2..d6a578a63e10e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java @@ -61,7 +61,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient try (XContentParser parser = restRequest.contentParser()) { putRequest = PutDataFrameAnalyticsAction.Request.parseRequest(id, parser); } - putRequest.timeout(restRequest.paramAsTime("timeout", putRequest.timeout())); + putRequest.ackTimeout(restRequest.paramAsTime("timeout", putRequest.ackTimeout())); return channel -> client.execute(PutDataFrameAnalyticsAction.INSTANCE, putRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java index 9ed8d2c13b04c..d26d0091e1acf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestDeleteFilterAction.java @@ -43,7 +43,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Request request = new Request(restRequest.param(Request.FILTER_ID.getPreferredName())); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(DeleteFilterAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java index b50157f87552f..e010bd67dff75 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java @@ -48,7 +48,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient DeleteTrainedModelAction.Request request = new DeleteTrainedModelAction.Request(modelId); if (restRequest.hasParam(TIMEOUT.getPreferredName())) { TimeValue timeout = restRequest.paramAsTime(TIMEOUT.getPreferredName(), AcknowledgedRequest.DEFAULT_ACK_TIMEOUT); - request.timeout(timeout); + request.ackTimeout(timeout); } request.setForce(restRequest.paramAsBoolean(DeleteTrainedModelAction.Request.FORCE.getPreferredName(), request.isForce())); return channel -> client.execute(DeleteTrainedModelAction.INSTANCE, request, new 
RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java index c07afb42b8471..9fcad6207c54b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java @@ -52,7 +52,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient waitForCompletion, parser ); - putRequest.timeout(restRequest.paramAsTime("timeout", putRequest.timeout())); + putRequest.ackTimeout(restRequest.paramAsTime("timeout", putRequest.ackTimeout())); return channel -> client.execute(PutTrainedModelAction.INSTANCE, putRequest, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestUpdateTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestUpdateTrainedModelDeploymentAction.java index d70445605d3f5..db21a4278df24 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestUpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestUpdateTrainedModelDeploymentAction.java @@ -50,7 +50,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String modelId = restRequest.param(StartTrainedModelDeploymentAction.Request.MODEL_ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); UpdateTrainedModelDeploymentAction.Request request = UpdateTrainedModelDeploymentAction.Request.parseRequest(modelId, parser); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(UpdateTrainedModelDeploymentAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java index 4990c1cbaa61d..35428b6617c24 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java @@ -54,7 +54,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String jobId = restRequest.param(Job.ID.getPreferredName()); String forecastId = restRequest.param(Forecast.FORECAST_ID.getPreferredName(), Metadata.ALL); final DeleteForecastAction.Request request = new DeleteForecastAction.Request(jobId, forecastId); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.setAllowNoForecasts(restRequest.paramAsBoolean("allow_no_forecasts", request.isAllowNoForecasts())); return channel -> client.execute(DeleteForecastAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java index 989f8766e8f82..81cc9ab036bb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteJobAction.java @@ -50,7 +50,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { DeleteJobAction.Request deleteJobRequest = new DeleteJobAction.Request(restRequest.param(Job.ID.getPreferredName())); deleteJobRequest.setForce(restRequest.paramAsBoolean(CloseJobAction.Request.FORCE.getPreferredName(), deleteJobRequest.isForce())); - deleteJobRequest.timeout(restRequest.paramAsTime("timeout", deleteJobRequest.timeout())); + deleteJobRequest.ackTimeout(restRequest.paramAsTime("timeout", deleteJobRequest.ackTimeout())); deleteJobRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", deleteJobRequest.masterNodeTimeout())); deleteJobRequest.setDeleteUserAnnotations(restRequest.paramAsBoolean("delete_user_annotations", false)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java index 2bf27f29b2f36..6add232cfecb8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPostJobUpdateAction.java @@ -46,7 +46,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String jobId = restRequest.param(Job.ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); UpdateJobAction.Request updateJobRequest = UpdateJobAction.Request.parseRequest(jobId, parser); - updateJobRequest.timeout(restRequest.paramAsTime("timeout", updateJobRequest.timeout())); + updateJobRequest.ackTimeout(restRequest.paramAsTime("timeout", updateJobRequest.ackTimeout())); updateJobRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", updateJobRequest.masterNodeTimeout())); return channel -> client.execute(UpdateJobAction.INSTANCE, updateJobRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java index 5f8a6d98c23c5..7bff218114b71 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestPutJobAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient XContentParser parser = restRequest.contentParser(); IndicesOptions indicesOptions = IndicesOptions.fromRequest(restRequest, SearchRequest.DEFAULT_INDICES_OPTIONS); PutJobAction.Request putJobRequest = PutJobAction.Request.parseRequest(jobId, parser, indicesOptions); - putJobRequest.timeout(restRequest.paramAsTime("timeout", putJobRequest.timeout())); + putJobRequest.ackTimeout(restRequest.paramAsTime("timeout", putJobRequest.ackTimeout())); putJobRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", putJobRequest.masterNodeTimeout())); return channel -> client.execute(PutJobAction.INSTANCE, putJobRequest, new RestToXContentListener<>(channel)); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestResetJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestResetJobAction.java index 62ffd65cec3cd..28271ec578fb0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestResetJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestResetJobAction.java @@ -43,7 +43,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { ResetJobAction.Request request = new ResetJobAction.Request(restRequest.param(Job.ID.getPreferredName())); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.setDeleteUserAnnotations(restRequest.paramAsBoolean("delete_user_annotations", false)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java index f1411db1e99e8..4478552a22a9e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestRevertModelSnapshotAction.java @@ -65,7 +65,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient ) ); } - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(RevertModelSnapshotAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java index 27fe2b8acb79b..fad82d7491d15 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/action/GetStatusActionIT.java @@ -33,7 +33,7 @@ public void testTimeoutIfResourcesNotCreated() throws Exception { GetStatusAction.Request request = new GetStatusAction.Request(); request.waitForResourcesCreated(true); // shorter than the default timeout to avoid excessively long execution - request.timeout(TimeValue.timeValueSeconds(15)); + request.ackTimeout(TimeValue.timeValueSeconds(15)); GetStatusAction.Response response = client().execute(GetStatusAction.INSTANCE, request).get(); assertEquals(RestStatus.REQUEST_TIMEOUT, response.status()); @@ -56,7 +56,7 @@ public void testWaitsUntilResourcesAreCreated() throws Exception { updateProfilingTemplatesEnabled(true); GetStatusAction.Request request = new GetStatusAction.Request(); // higher timeout since we have more shards than usual - request.timeout(TimeValue.timeValueSeconds(120)); + request.ackTimeout(TimeValue.timeValueSeconds(120)); request.waitForResourcesCreated(true); GetStatusAction.Response response = 
client().execute(GetStatusAction.INSTANCE, request).get(); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 88f19a62bbedf..d0cc86e3d5da8 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -77,7 +77,7 @@ protected void masterOperation( ActionListener listener ) { if (request.waitForResourcesCreated()) { - createAndRegisterListener(listener, request.timeout()); + createAndRegisterListener(listener, request.ackTimeout()); } else { resolver.execute(state, listener); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java index 2d5cc7a71669c..e1f8ac16f15ec 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/rest/RestGetStatusAction.java @@ -35,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { GetStatusAction.Request request = new GetStatusAction.Request(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); request.waitForResourcesCreated(restRequest.paramAsBoolean("wait_for_resources_created", false)); return channel -> client.execute( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java index 216444cf51845..fc38bf16da8ce 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportUpdateSecuritySettingsAction.java @@ -82,21 +82,21 @@ protected void masterOperation( createUpdateSettingsRequest( SECURITY_MAIN_ALIAS, Settings.builder().loadFromMap(request.mainIndexSettings()).build(), - request.timeout(), + request.ackTimeout(), request.masterNodeTimeout(), state ), createUpdateSettingsRequest( SECURITY_TOKENS_ALIAS, Settings.builder().loadFromMap(request.tokensIndexSettings()).build(), - request.timeout(), + request.ackTimeout(), request.masterNodeTimeout(), state ), createUpdateSettingsRequest( SECURITY_PROFILE_ALIAS, Settings.builder().loadFromMap(request.profilesIndexSettings()).build(), - request.timeout(), + request.ackTimeout(), request.masterNodeTimeout(), state ) diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java index bebf7b176600f..5d9a692a0876c 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java 
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestDeleteSnapshotLifecycleAction.java @@ -36,7 +36,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { String lifecycleId = request.param("name"); DeleteSnapshotLifecycleAction.Request req = new DeleteSnapshotLifecycleAction.Request(lifecycleId); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(DeleteSnapshotLifecycleAction.INSTANCE, req, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java index 58e86392e0083..622021ca87a89 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java @@ -37,7 +37,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { String snapLifecycleId = request.param("name"); ExecuteSnapshotLifecycleAction.Request req = new ExecuteSnapshotLifecycleAction.Request(snapLifecycleId); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(ExecuteSnapshotLifecycleAction.INSTANCE, req, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotRetentionAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotRetentionAction.java index 63ae6576be2fa..2175839886022 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotRetentionAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotRetentionAction.java @@ -35,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { ExecuteSnapshotRetentionAction.Request req = new ExecuteSnapshotRetentionAction.Request(); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(ExecuteSnapshotRetentionAction.INSTANCE, req, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java index abb4dcbd9d3db..278e15a9f9b36 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSLMStatusAction.java @@ -36,7 +36,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { AcknowledgedRequest.Plain request = new 
AcknowledgedRequest.Plain(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(GetSLMStatusAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java index aec1ab4c4ebd2..c6609d85ca1c3 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleAction.java @@ -37,7 +37,7 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { String[] lifecycleNames = Strings.splitStringByCommaToArray(request.param("name")); GetSnapshotLifecycleAction.Request req = new GetSnapshotLifecycleAction.Request(lifecycleNames); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(GetSnapshotLifecycleAction.INSTANCE, req, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java index fd7abbafe0425..5fea0905f04bd 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestGetSnapshotLifecycleStatsAction.java @@ -36,7 +36,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { AcknowledgedRequest.Plain req = new AcknowledgedRequest.Plain(); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(GetSnapshotLifecycleStatsAction.INSTANCE, req, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java index ddd86639c9551..968f043f61bd6 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestPutSnapshotLifecycleAction.java @@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli String snapLifecycleName = request.param("name"); try (XContentParser parser = request.contentParser()) { PutSnapshotLifecycleAction.Request req = PutSnapshotLifecycleAction.Request.parseRequest(snapLifecycleName, parser); - req.timeout(request.paramAsTime("timeout", req.timeout())); + req.ackTimeout(request.paramAsTime("timeout", req.ackTimeout())); 
req.masterNodeTimeout(request.paramAsTime("master_timeout", req.masterNodeTimeout())); return channel -> client.execute(PutSnapshotLifecycleAction.INSTANCE, req, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStartSLMAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStartSLMAction.java index c33a31cb7888e..4db876149e784 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStartSLMAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStartSLMAction.java @@ -35,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { StartSLMAction.Request request = new StartSLMAction.Request(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(StartSLMAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStopSLMAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStopSLMAction.java index aeeaca2f1b237..9131dcc15cf78 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStopSLMAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/action/RestStopSLMAction.java @@ -35,7 +35,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { StopSLMAction.Request request = new StopSLMAction.Request(); - request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + request.ackTimeout(restRequest.paramAsTime("timeout", request.ackTimeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(StopSLMAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index 51379b81d7e9d..dc3f8a514916b 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -105,14 +105,14 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // <2> Delete destination index if requested ActionListener stopTransformActionListener = ActionListener.wrap(unusedStopResponse -> { if (request.isDeleteDestIndex()) { - deleteDestinationIndex(parentTaskId, request.getId(), request.timeout(), deleteDestIndexListener); + deleteDestinationIndex(parentTaskId, request.getId(), request.ackTimeout(), deleteDestIndexListener); } else { deleteDestIndexListener.onResponse(null); } }, listener::onFailure); // <1> Stop transform if it's currently running - stopTransform(transformIsRunning, parentTaskId, request.getId(), request.timeout(), stopTransformActionListener); + stopTransform(transformIsRunning, parentTaskId, request.getId(), request.ackTimeout(), 
stopTransformActionListener); } private void stopTransform( @@ -152,7 +152,7 @@ private void deleteDestinationIndex( TransformConfig config = transformConfigAndVersion.v1(); String destIndex = config.getDestination().getIndex(); DeleteIndexRequest deleteDestIndexRequest = new DeleteIndexRequest(destIndex); - deleteDestIndexRequest.timeout(timeout); + deleteDestIndexRequest.ackTimeout(timeout); deleteDestIndexRequest.setParentTask(parentTaskId); executeWithHeadersAsync( config.getHeaders(), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index f14ac9a534f28..e79602887b728 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -148,7 +148,7 @@ protected void doExecute(Task task, Request request, ActionListener li ActionListener validateConfigListener = ActionListener.wrap( validateConfigResponse -> getPreview( parentTaskId, - request.timeout(), + request.ackTimeout(), config.getId(), // note: @link{PreviewTransformAction} sets an id, so this is never null function, config.getSource(), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index df36a850a3b0a..b802a6522f367 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -115,7 +115,7 @@ protected void masterOperation(Task task, Request request, ClusterState clusterS client, ClientHelper.TRANSFORM_ORIGIN, ValidateTransformAction.INSTANCE, - new ValidateTransformAction.Request(config, request.isDeferValidation(), request.timeout()), + new ValidateTransformAction.Request(config, request.isDeferValidation(), request.ackTimeout()), l ) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index 6d0e3213d67fc..897ed5c52ec1f 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -135,7 +135,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A false, // defer validation false, // dry run false, // check access - request.timeout(), + request.ackTimeout(), destIndexSettings, updateTransformListener ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java index 01359f351f07a..20902255c0297 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java @@ -144,7 +144,7 @@ protected void masterOperation( waitForTransformTaskStarted( task.getId(), transformTask, - request.timeout(), + request.ackTimeout(), ActionListener.wrap(taskStarted -> listener.onResponse(new StartTransformAction.Response(true)), listener::onFailure) ); }, listener::onFailure); @@ -260,7 +260,7 @@ protected void masterOperation( client, ClientHelper.TRANSFORM_ORIGIN, ValidateTransformAction.INSTANCE, - new ValidateTransformAction.Request(config, false, request.timeout()), + new ValidateTransformAction.Request(config, false, request.ackTimeout()), validationListener ); }, listener::onFailure); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java index 592b7b423c053..eac61dd7d9528 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java @@ -108,7 +108,7 @@ protected void masterOperation(Task ignoredTask, Request request, ClusterState s return; } - recursiveExpandTransformIdsAndUpgrade(request.isDryRun(), request.timeout(), ActionListener.wrap(updatesByStatus -> { + recursiveExpandTransformIdsAndUpgrade(request.isDryRun(), request.ackTimeout(), ActionListener.wrap(updatesByStatus -> { final long updated = updatesByStatus.getOrDefault(UpdateResult.Status.UPDATED, 0L); final long noAction = updatesByStatus.getOrDefault(UpdateResult.Status.NONE, 0L); final long needsUpdate = updatesByStatus.getOrDefault(UpdateResult.Status.NEEDS_UPDATE, 0L); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java index 2a450eb931324..71593d416577e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java @@ -139,7 +139,7 @@ protected void doExecute(Task task, Request request, ActionListener li if (request.isDeferValidation()) { validateQueryListener.onResponse(true); } else { - function.validateQuery(client, config.getHeaders(), config.getSource(), request.timeout(), validateQueryListener); + function.validateQuery(client, config.getHeaders(), config.getSource(), request.ackTimeout(), validateQueryListener); } }, listener::onFailure); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportUpdateWatcherSettingsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportUpdateWatcherSettingsAction.java index b9895b2319599..94ef321806033 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportUpdateWatcherSettingsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportUpdateWatcherSettingsAction.java @@ -93,7 +93,7 @@ protected void masterOperation( final Settings newSettings = 
Settings.builder().loadFromMap(request.settings()).build(); final UpdateSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpdateSettingsClusterStateUpdateRequest().indices( new Index[] { watcherIndexMd.getIndex() } - ).settings(newSettings).ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()); + ).settings(newSettings).ackTimeout(request.ackTimeout()).masterNodeTimeout(request.masterNodeTimeout()); updateSettingsService.updateSettings(clusterStateUpdateRequest, new ActionListener<>() { @Override diff --git a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java index 4bd88217a2d25..bef0bbb1ee3c5 100644 --- a/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java +++ b/x-pack/qa/freeze-plugin/src/main/java/org/elasticsearch/plugin/freeze/FreezeIndexPlugin.java @@ -75,7 +75,7 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { boolean freeze = request.path().endsWith("/_freeze"); FreezeRequest freezeRequest = new FreezeRequest(Strings.splitStringByCommaToArray(request.param("index"))); - freezeRequest.timeout(request.paramAsTime("timeout", freezeRequest.timeout())); + freezeRequest.ackTimeout(request.paramAsTime("timeout", freezeRequest.ackTimeout())); freezeRequest.masterNodeTimeout(request.paramAsTime("master_timeout", freezeRequest.masterNodeTimeout())); freezeRequest.indicesOptions(IndicesOptions.fromRequest(request, freezeRequest.indicesOptions())); String waitForActiveShards = request.param("wait_for_active_shards"); From 841a27486f4fc5d9fda9a4d7b9e81d41100c6cde Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 22 Apr 2024 08:18:09 +0100 Subject: [PATCH 128/130] Make `MasterServiceTests#testAcking` deterministic (#107651) No need to use a real concurrent `ThreadPool` here; we can cover everything we need with a fake threadpool, making the test deterministic (and faster).
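In outline, the pattern is (a minimal sketch: DeterministicTaskQueue, getThreadPool() and runAllTasksInTimeOrder() are taken from the diff below, while the submitted task body is a placeholder):

    final var deterministicTaskQueue = new DeterministicTaskQueue();
    final var threadPool = deterministicTaskQueue.getThreadPool(); // fake pool, creates no real threads
    threadPool.generic().execute(() -> {
        // the work under test; nothing runs until the queue is drained below
    });
    deterministicTaskQueue.runAllTasksInTimeOrder(); // executes every queued task deterministically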
Relates #107044 --- .../cluster/service/MasterServiceTests.java | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 6a24c8fc88078..57fb819ccd50e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -1336,7 +1336,7 @@ public void onFailure(Exception e) { } } - public void testAcking() throws InterruptedException { + public void testAcking() { final DiscoveryNode node1 = DiscoveryNodeUtils.builder("node1").roles(emptySet()).build(); final DiscoveryNode node2 = DiscoveryNodeUtils.builder("node2").roles(emptySet()).build(); final DiscoveryNode node3 = DiscoveryNodeUtils.builder("node3").roles(emptySet()).build(); @@ -1344,12 +1344,15 @@ public void testAcking() throws InterruptedException { .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), MasterServiceTests.class.getSimpleName()) .put(Node.NODE_NAME_SETTING.getKey(), "test_node") .build(); + final var deterministicTaskQueue = new DeterministicTaskQueue(); + final var threadPool = deterministicTaskQueue.getThreadPool(); + threadPool.getThreadContext().markAsSystemContext(); try ( - MasterService masterService = new MasterService( - settings, - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + var masterService = createMasterService( + true, + new TaskManager(settings, threadPool, emptySet()), threadPool, - new TaskManager(settings, threadPool, emptySet()) + new StoppableExecutorServiceWrapper(threadPool.generic()) ) ) { @@ -1365,7 +1368,6 @@ public void testAcking() throws InterruptedException { publisherRef.get().publish(e, pl, al); }); masterService.setClusterStateSupplier(() -> initialClusterState); - masterService.start(); class LatchAckListener implements ClusterStateAckListener { private final CountDownLatch latch; @@ -1442,7 +1444,8 @@ public void onFailure(Exception e) { : ClusterState.builder(batchExecutionContext.initialState()).build(); }).submitTask("success-test", new Task(), null); - assertTrue(latch.await(10, TimeUnit.SECONDS)); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } // check that we complete a dynamic ack listener supplied by the task @@ -1473,7 +1476,8 @@ public void onFailure(Exception e) { : ClusterState.builder(batchExecutionContext.initialState()).build(); }).submitTask("success-test", new Task(), null); - assertTrue(latch.await(10, TimeUnit.SECONDS)); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } // check that we supply a no-op publish listener if we only care about acking @@ -1504,7 +1508,8 @@ public void onFailure(Exception e) { : ClusterState.builder(batchExecutionContext.initialState()).build(); }).submitTask("success-test", new Task(), null); - assertTrue(latch.await(10, TimeUnit.SECONDS)); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } // check that exception from acking is passed to listener @@ -1553,7 +1558,8 @@ public void onAckFailure(Exception e) { return ClusterState.builder(batchExecutionContext.initialState()).build(); }).submitTask("node-ack-fail-test", new Task(), null); - assertTrue(latch.await(10, TimeUnit.SECONDS)); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } // check that we don't time out before even committing the cluster state @@ 
-1587,6 +1593,7 @@ protected AcknowledgedResponse newResponse(boolean acknowledged) { @Override public void onFailure(Exception e) { + assertEquals("mock exception", asInstanceOf(FailedToCommitClusterStateException.class, e).getMessage()); latch.countDown(); } @@ -1597,14 +1604,15 @@ public void onAckTimeout() { } ); - latch.await(); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } // check that we timeout if commit took too long { final CountDownLatch latch = new CountDownLatch(2); - final TimeValue ackTimeout = TimeValue.timeValueMillis(randomInt(100)); + final TimeValue ackTimeout = TimeValue.timeValueMillis(scaledRandomIntBetween(0, 100000)); publisherRef.set((clusterChangedEvent, publishListener, ackListener) -> { publishListener.onResponse(null); @@ -1652,7 +1660,8 @@ public void onAckTimeout() { } ); - latch.await(); + deterministicTaskQueue.runAllTasksInTimeOrder(); + safeAwait(latch); } } } From 25056557de56dd6a951562e8f3e3f01482a3c6e8 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 22 Apr 2024 09:26:51 +0200 Subject: [PATCH 129/130] Use #addWithoutBreaking when adding a negative number of bytes to the circuit breaker in SequenceMatcher (#107655) This change is to avoid calling the real memory circuit breaker when adding negative bytes. --- docs/changelog/107655.yaml | 6 ++++++ .../eql/execution/sequence/SequenceMatcher.java | 14 ++++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/107655.yaml diff --git a/docs/changelog/107655.yaml b/docs/changelog/107655.yaml new file mode 100644 index 0000000000000..7091224d211f1 --- /dev/null +++ b/docs/changelog/107655.yaml @@ -0,0 +1,6 @@ +pr: 107655 +summary: "Use #addWithoutBreaking when adding a negative number of bytes to the circuit\ \ breaker in `SequenceMatcher`" +area: EQL +type: bug +issues: [] diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java index 1ad9002f88999..9b0eb7fd95527 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java @@ -434,14 +434,24 @@ private void clearCircuitBreaker() { // sequences. private void trackMemory() { long newRamBytesUsedInFlight = ramBytesUsedInFlight(); - circuitBreaker.addEstimateBytesAndMaybeBreak(newRamBytesUsedInFlight - prevRamBytesUsedInFlight, CB_INFLIGHT_LABEL); + addRequestCircuitBreakerBytes(newRamBytesUsedInFlight - prevRamBytesUsedInFlight, CB_INFLIGHT_LABEL); prevRamBytesUsedInFlight = newRamBytesUsedInFlight; long newRamBytesUsedCompleted = ramBytesUsedCompleted(); - circuitBreaker.addEstimateBytesAndMaybeBreak(newRamBytesUsedCompleted - prevRamBytesUsedCompleted, CB_COMPLETED_LABEL); + addRequestCircuitBreakerBytes(newRamBytesUsedCompleted - prevRamBytesUsedCompleted, CB_COMPLETED_LABEL); prevRamBytesUsedCompleted = newRamBytesUsedCompleted; } + private void addRequestCircuitBreakerBytes(long bytes, String label) { + // Only use the potential to circuit break if bytes are being incremented. In the case of 0 + // bytes, it will trigger the parent circuit breaker.
+ if (bytes >= 0) { + circuitBreaker.addEstimateBytesAndMaybeBreak(bytes, label); + } else { + circuitBreaker.addWithoutBreaking(bytes); + } + } + @Override public String toString() { return LoggerMessageFormat.format( From c0417b33c21836bc1a8aab647b9b1b6fa23123eb Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 22 Apr 2024 09:28:55 +0100 Subject: [PATCH 130/130] Forbid `null` ack timeout (#107653) Today a cluster state update task with `null` ack timeout implies the timeout is zero. This implicit behaviour is trappy, so this commit forbids it. Relates #107044 --- .../cluster/metadata/MetadataUpdateSettingsServiceIT.java | 5 +++-- .../org/elasticsearch/cluster/service/MasterService.java | 3 +++ .../java/org/elasticsearch/upgrades/SystemIndexMigrator.java | 3 ++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsServiceIT.java index 59f4905d5924b..6e928a42e142b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsServiceIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; @@ -40,7 +41,7 @@ public void testThatNonDynamicSettingChangesTakeEffect() throws Exception { MetadataUpdateSettingsService metadataUpdateSettingsService = internalCluster().getCurrentMasterNodeInstance( MetadataUpdateSettingsService.class ); - UpdateSettingsClusterStateUpdateRequest request = new UpdateSettingsClusterStateUpdateRequest(); + UpdateSettingsClusterStateUpdateRequest request = new UpdateSettingsClusterStateUpdateRequest().ackTimeout(TimeValue.ZERO); List indices = new ArrayList<>(); for (IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { for (IndexService indexService : indicesService) { @@ -108,7 +109,7 @@ public void testThatNonDynamicSettingChangesDoNotUnncessesarilyCauseReopens() th MetadataUpdateSettingsService metadataUpdateSettingsService = internalCluster().getCurrentMasterNodeInstance( MetadataUpdateSettingsService.class ); - UpdateSettingsClusterStateUpdateRequest request = new UpdateSettingsClusterStateUpdateRequest(); + UpdateSettingsClusterStateUpdateRequest request = new UpdateSettingsClusterStateUpdateRequest().ackTimeout(TimeValue.ZERO); List indices = new ArrayList<>(); for (IndicesService indicesService : internalCluster().getInstances(IndicesService.class)) { for (IndexService indexService : indicesService) { diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 48917ca84e89b..a9f891e555f21 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -46,6 +46,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import 
org.elasticsearch.node.Node; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskAwareRequest; @@ -703,9 +704,11 @@ private static class TaskAckListener { this.countDown = new CountDown(countDown + 1); // we also wait for onCommit to be called } + @UpdateForV9 // properly forbid ackTimeout == null after enough time has passed to be sure it's not used in production public void onCommit(TimeValue commitTime) { TimeValue ackTimeout = contextPreservingAckListener.ackTimeout(); if (ackTimeout == null) { + assert false : "ackTimeout must always be present: " + contextPreservingAckListener; ackTimeout = TimeValue.ZERO; } final TimeValue timeLeft = TimeValue.timeValueNanos(Math.max(0, ackTimeout.nanos() - commitTime.nanos())); diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index 968e64fcc3888..3dcdf558e6dfc 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -537,7 +538,7 @@ private void setWriteBlock(Index index, boolean readOnlyValue, ActionListener