diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 1439a5a1185f5..46960c9ed99bf 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -8,16 +8,19 @@ package org.elasticsearch.benchmark.vector; +import org.apache.lucene.codecs.lucene99.OffHeapQuantizedByteVectorValues; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.MMapDirectory; +import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.ScalarQuantizedRandomVectorScorer; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.vec.VectorScorer; import org.elasticsearch.vec.VectorScorerFactory; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -71,10 +74,10 @@ public class VectorScorerBenchmark { float vec2Offset; float scoreCorrectionConstant; - ScalarQuantizedVectorSimilarity luceneDotScorer; - ScalarQuantizedVectorSimilarity luceneSqrScorer; - VectorScorer nativeDotScorer; - VectorScorer nativeSqrScorer; + RandomVectorScorer luceneDotScorer; + RandomVectorScorer luceneSqrScorer; + RandomVectorScorer nativeDotScorer; + RandomVectorScorer nativeSqrScorer; @Setup public void setup() throws IOException { @@ -107,14 +110,22 @@ public void setup() throws IOException { out.writeInt(Float.floatToIntBits(vec2Offset)); } in = 
dir.openInput("vector.data", IOContext.DEFAULT); + var values = vectorValues(dims, 2, in); - luceneDotScorer = ScalarQuantizedVectorSimilarity.fromVectorSimilarity( - VectorSimilarityFunction.DOT_PRODUCT, - scoreCorrectionConstant + luceneDotScorer = new ScalarQuantizedRandomVectorScorer( + ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.DOT_PRODUCT, scoreCorrectionConstant), + values.copy(), + vec1, + vec1Offset ); - luceneSqrScorer = ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.EUCLIDEAN, scoreCorrectionConstant); - nativeDotScorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, scoreCorrectionConstant, DOT_PRODUCT, in).get(); - nativeSqrScorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, scoreCorrectionConstant, EUCLIDEAN, in).get(); + luceneSqrScorer = new ScalarQuantizedRandomVectorScorer( + ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.EUCLIDEAN, scoreCorrectionConstant), + values.copy(), + vec1, + vec1Offset + ); + nativeDotScorer = factory.getInt7ScalarQuantizedVectorScorer(DOT_PRODUCT, in, values, scoreCorrectionConstant).get().scorer(0); + nativeSqrScorer = factory.getInt7ScalarQuantizedVectorScorer(EUCLIDEAN, in, values, scoreCorrectionConstant).get().scorer(0); // sanity var f1 = dotProductLucene(); @@ -144,13 +155,13 @@ public void teardown() throws IOException { } @Benchmark - public float dotProductLucene() { - return luceneDotScorer.score(vec1, vec1Offset, vec2, vec2Offset); + public float dotProductLucene() throws IOException { + return luceneDotScorer.score(1); } @Benchmark public float dotProductNative() throws IOException { - return nativeDotScorer.score(0, 1); + return nativeDotScorer.score(1); } @Benchmark @@ -166,13 +177,13 @@ public float dotProductScalar() { // -- square distance @Benchmark - public float squareDistanceLucene() { - return luceneSqrScorer.score(vec1, vec1Offset, vec2, vec2Offset); + public float 
squareDistanceLucene() throws IOException { + return luceneSqrScorer.score(1); } @Benchmark public float squareDistanceNative() throws IOException { - return nativeSqrScorer.score(0, 1); + return nativeSqrScorer.score(1); } @Benchmark @@ -186,6 +197,10 @@ public float squareDistanceScalar() { return 1 / (1f + adjustedDistance); } + RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in) throws IOException { + return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, in.slice("values", 0, in.length())); + } + // Unsigned int7 byte vectors have values in the range of 0 to 127 (inclusive). static final byte MIN_INT7_VALUE = 0; static final byte MAX_INT7_VALUE = 127; diff --git a/docs/changelog/108746.yaml b/docs/changelog/108746.yaml new file mode 100644 index 0000000000000..93ed917f3b56e --- /dev/null +++ b/docs/changelog/108746.yaml @@ -0,0 +1,5 @@ +pr: 108746 +summary: Support synthetic source for `aggregate_metric_double` when ignore_malf… +area: Mapping +type: feature +issues: [] diff --git a/docs/changelog/108849.yaml b/docs/changelog/108849.yaml new file mode 100644 index 0000000000000..7c503efe9187b --- /dev/null +++ b/docs/changelog/108849.yaml @@ -0,0 +1,6 @@ +pr: 108849 +summary: "[Osquery] Extend `kibana_system` role with an access to new `osquery_manager`\ + \ index" +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/108854.yaml b/docs/changelog/108854.yaml new file mode 100644 index 0000000000000..d6a880830f0d9 --- /dev/null +++ b/docs/changelog/108854.yaml @@ -0,0 +1,5 @@ +pr: 108854 +summary: "[Connector API] Fix bug with parsing *_doc_count nullable fields" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/108862.yaml b/docs/changelog/108862.yaml new file mode 100644 index 0000000000000..ddba15f11e8f5 --- /dev/null +++ b/docs/changelog/108862.yaml @@ -0,0 +1,5 @@ +pr: 108862 +summary: "Apm-data: set codec: best_compression for logs-apm.* data streams" 
+area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/108878.yaml b/docs/changelog/108878.yaml new file mode 100644 index 0000000000000..1a8127869a647 --- /dev/null +++ b/docs/changelog/108878.yaml @@ -0,0 +1,5 @@ +pr: 108878 +summary: Support arrays in fallback synthetic source implementation +area: Mapping +type: feature +issues: [] diff --git a/docs/changelog/108881.yaml b/docs/changelog/108881.yaml new file mode 100644 index 0000000000000..b6de1129cfa03 --- /dev/null +++ b/docs/changelog/108881.yaml @@ -0,0 +1,5 @@ +pr: 108881 +summary: Add synthetic source support for `geo_shape` via fallback implementation +area: Mapping +type: feature +issues: [] diff --git a/docs/changelog/108886.yaml b/docs/changelog/108886.yaml new file mode 100644 index 0000000000000..18df59e577713 --- /dev/null +++ b/docs/changelog/108886.yaml @@ -0,0 +1,5 @@ +pr: 108886 +summary: Expose `?master_timeout` on get-shutdown API +area: Infra/Node Lifecycle +type: bug +issues: [] diff --git a/docs/changelog/108931.yaml b/docs/changelog/108931.yaml new file mode 100644 index 0000000000000..520637c5928e7 --- /dev/null +++ b/docs/changelog/108931.yaml @@ -0,0 +1,5 @@ +pr: 108931 +summary: Guard systemd library lookup from unreadable directories +area: Infra/Core +type: bug +issues: [] diff --git a/docs/changelog/108942.yaml b/docs/changelog/108942.yaml new file mode 100644 index 0000000000000..c58b06a92cee8 --- /dev/null +++ b/docs/changelog/108942.yaml @@ -0,0 +1,5 @@ +pr: 108942 +summary: Fix NPE in trained model assignment updater +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/108999.yaml b/docs/changelog/108999.yaml new file mode 100644 index 0000000000000..089d765b4e2d0 --- /dev/null +++ b/docs/changelog/108999.yaml @@ -0,0 +1,5 @@ +pr: 108999 +summary: Use default translog durability on AD results index +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/109020.yaml b/docs/changelog/109020.yaml new file mode 
100644 index 0000000000000..c3efb1a1409bf --- /dev/null +++ b/docs/changelog/109020.yaml @@ -0,0 +1,6 @@ +pr: 109020 +summary: Only skip deleting a downsampled index if downsampling is in progress as + part of DSL retention +area: Data streams +type: bug +issues: [] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 82e7bb3cea9a5..6cd23fc524f96 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -24,8 +24,7 @@ POST /_query/async | SORT year | LIMIT 5 """, - "wait_for_completion_timeout": "2s", - "version": "2024.04.01" + "wait_for_completion_timeout": "2s" } ---- // TEST[setup:library] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index e5e0e9fda12ec..fbac57d88118e 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -16,8 +16,7 @@ POST /_query | STATS MAX(page_count) BY year | SORT year | LIMIT 5 - """, - "version": "2024.04.01" + """ } ---- // TEST[setup:library] @@ -79,10 +78,6 @@ For syntax, refer to <>. `query`:: (Required, string) {esql} query to run. For syntax, refer to <>. -`version`:: -(Required, string) {esql} language version. Can be sent in short or long form, e.g. -`2024.04.01` or `2024.04.01.🚀`. See <> for details. 
- [discrete] [role="child_attributes"] [[esql-query-api-response-body]] diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index 106dba0e85dfe..de2b6dedd8776 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -16,8 +16,7 @@ The <> accepts an {esql} query string in the ---- POST /_query?format=txt { - "query": "FROM library | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5", - "version": "2024.04.01" + "query": "FROM library | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5" } ---- // TEST[setup:library] @@ -56,8 +55,7 @@ POST /_query?format=txt | KEEP author, name, page_count, release_date | SORT page_count DESC | LIMIT 5 - """, - "version": "2024.04.01" + """ } ---- // TEST[setup:library] @@ -145,8 +143,7 @@ POST /_query?format=txt "lte": 200 } } - }, - "version": "2024.04.01" + } } ---- // TEST[setup:library] @@ -182,8 +179,7 @@ POST /_query?format=json | SORT page_count DESC | LIMIT 5 """, - "columnar": true, - "version": "2024.04.01" + "columnar": true } ---- // TEST[setup:library] @@ -230,8 +226,7 @@ POST /_query | EVAL birth_date = date_parse(birth_date_string) | EVAL month_of_birth = DATE_FORMAT("MMMM",birth_date) | LIMIT 5 - """, - "version": "2024.04.01" + """ } ---- // TEST[setup:library] @@ -254,8 +249,7 @@ POST /_query | STATS count = COUNT(*) by year | WHERE count > 0 | LIMIT 5 - """, - "version": "2024.04.01" + """ } ---- // TEST[setup:library] @@ -276,8 +270,7 @@ POST /_query | WHERE count > ? 
| LIMIT 5 """, - "params": [300, "Frank Herbert", 0], - "version": "2024.04.01" + "params": [300, "Frank Herbert", 0] } ---- // TEST[setup:library] @@ -311,8 +304,7 @@ POST /_query/async | SORT year | LIMIT 5 """, - "wait_for_completion_timeout": "2s", - "version": "2024.04.01" + "wait_for_completion_timeout": "2s" } ---- // TEST[setup:library] diff --git a/docs/reference/esql/esql-using.asciidoc b/docs/reference/esql/esql-using.asciidoc index d45a7f1743d23..3e045163069ec 100644 --- a/docs/reference/esql/esql-using.asciidoc +++ b/docs/reference/esql/esql-using.asciidoc @@ -18,12 +18,8 @@ Using {esql} to query across multiple clusters. <>:: Using the <> to list and cancel {esql} queries. -<>:: -Information about {esql} language versions. - include::esql-rest.asciidoc[] include::esql-kibana.asciidoc[] include::esql-security-solution.asciidoc[] include::esql-across-clusters.asciidoc[] include::task-management.asciidoc[] -include::esql-version.asciidoc[] diff --git a/docs/reference/esql/esql-version.asciidoc b/docs/reference/esql/esql-version.asciidoc deleted file mode 100644 index daeb796ecc5b1..0000000000000 --- a/docs/reference/esql/esql-version.asciidoc +++ /dev/null @@ -1,49 +0,0 @@ -[[esql-version]] -=== {esql} language versions - -++++ -Language versions -++++ - -[discrete] -[[esql-versions-released]] -==== Released versions - -* Version `2024.04.01` - -[discrete] -[[esql-versions-explanation]] -==== How versions work - -{esql} language versions are independent of {es} versions. -Versioning the language ensures that your queries will always -remain valid, independent of new {es} and {esql} releases. And it lets us -evolve ESQL as we learn more from people using it. We don't plan to make -huge changes to it, but we know we've made mistakes and we don't want those -to live forever. - -For instance, the following query will remain valid, even if a future -version of {esql} introduces syntax changes or changes how the used -commands or functions work. 
- -[source,console] ----- -POST /_query?format=txt -{ - "version": "2024.04.01", - "query": """ - FROM library - | EVAL release_month = DATE_TRUNC(1 month, release_date) - | KEEP release_month - | SORT release_month ASC - | LIMIT 3 - """ -} ----- -// TEST[setup:library] - -We won't make breaking changes to released {esql} versions and -versions will remain supported until they are deprecated. -New features, bug fixes, and performance improvements -will continue to be added to released {esql} versions, -provided they do not involve breaking changes. diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 35f46db25425b..8ff645bba863e 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -17,8 +17,7 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2", - "version": "2024.04.01" + "query": "FROM mv | LIMIT 2" } ---- @@ -66,8 +65,7 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2", - "version": "2024.04.01" + "query": "FROM mv | LIMIT 2" } ---- @@ -108,8 +106,7 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | LIMIT 2", - "version": "2024.04.01" + "query": "FROM mv | LIMIT 2" } ---- @@ -151,8 +148,7 @@ POST /mv/_bulk?refresh POST /_query { - "query": "FROM mv | EVAL b=TO_STRING(b) | LIMIT 2", - "version": "2024.04.01" + "query": "FROM mv | EVAL b=TO_STRING(b) | LIMIT 2" } ---- @@ -175,7 +171,7 @@ POST /_query ==== Functions Unless otherwise documented functions will return `null` when applied to a multivalued -field. This behavior may change in a later version. +field. 
[source,console,id=esql-multivalued-fields-mv-into-null] ---- @@ -190,8 +186,7 @@ POST /mv/_bulk?refresh ---- POST /_query { - "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4", - "version": "2024.04.01" + "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4" } ---- // TEST[continued] @@ -230,8 +225,7 @@ Work around this limitation by converting the field to single value with one of: ---- POST /_query { - "query": "FROM mv | EVAL b=MV_MIN(b) | EVAL b + 2, a + b | LIMIT 4", - "version": "2024.04.01" + "query": "FROM mv | EVAL b=MV_MIN(b) | EVAL b + 2, a + b | LIMIT 4" } ---- // TEST[continued] diff --git a/docs/reference/indices/put-index-template.asciidoc b/docs/reference/indices/put-index-template.asciidoc index bcc7fa9caa812..772bd51afdce8 100644 --- a/docs/reference/indices/put-index-template.asciidoc +++ b/docs/reference/indices/put-index-template.asciidoc @@ -12,7 +12,7 @@ that can be applied automatically to new indices. -------------------------------------------------- PUT /_index_template/template_1 { - "index_patterns" : ["te*"], + "index_patterns" : ["template*"], "priority" : 1, "template": { "settings" : { @@ -186,7 +186,7 @@ You can include <> in an index template. -------------------------------------------------- PUT _index_template/template_1 { - "index_patterns" : ["te*"], + "index_patterns" : ["template*"], "template": { "settings" : { "number_of_shards" : 1 @@ -218,7 +218,7 @@ the template with the highest priority is used. 
For example: -------------------------------------------------- PUT /_index_template/template_1 { - "index_patterns" : ["t*"], + "index_patterns" : ["temp*"], "priority" : 0, "template": { "settings" : { @@ -233,7 +233,7 @@ PUT /_index_template/template_1 PUT /_index_template/template_2 { - "index_patterns" : ["te*"], + "index_patterns" : ["template*"], "priority" : 1, "template": { "settings" : { @@ -246,7 +246,7 @@ PUT /_index_template/template_2 } -------------------------------------------------- -For indices that start with `te*`, `_source` will enabled, and the index will have two primary +For indices that start with `template*`, `_source` will enabled, and the index will have two primary shards and one replica, because only `template_2` will be applied. NOTE: Multiple templates with overlapping index patterns at the same priority are not allowed, and diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index f730db5c2af78..1eba9dfba8b50 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -52,6 +52,7 @@ types: ** <> ** <> ** <> +** <> ** <> ** <> ** <> diff --git a/docs/reference/mapping/types/aggregate-metric-double.asciidoc b/docs/reference/mapping/types/aggregate-metric-double.asciidoc index e702d34f07d4c..8e14fba976360 100644 --- a/docs/reference/mapping/types/aggregate-metric-double.asciidoc +++ b/docs/reference/mapping/types/aggregate-metric-double.asciidoc @@ -260,7 +260,7 @@ any issues, but features in technical preview are not subject to the support SLA of official GA features. `aggregate_metric-double` fields support <> in their default -configuration. Synthetic `_source` cannot be used together with <>. +configuration. 
For example: [source,console,id=synthetic-source-aggregate-metric-double-example] diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 628f764c04fe9..20f79df8950af 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -493,3 +493,16 @@ Due to the complex input structure and index representation of shapes, it is not currently possible to sort shapes or retrieve their fields directly. The `geo_shape` value is only retrievable through the `_source` field. + +[[geo-shape-synthetic-source]] +==== Synthetic source + +IMPORTANT: Synthetic `_source` is Generally Available only for TSDB indices +(indices that have `index.mode` set to `time_series`). For other indices +synthetic `_source` is in technical preview. Features in technical preview may +be changed or removed in a future release. Elastic will work to fix +any issues, but features in technical preview are not subject to the support SLA +of official GA features. + +`geo_shape` fields support <> in their +default configuration. diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 0d21f648ab58b..e10240a66fbb9 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -504,6 +504,7 @@ GET /_xpack/usage // TESTRESPONSE[s/"policy_stats" : \[[^\]]*\]/"policy_stats" : $body.$_path/] // TESTRESPONSE[s/"slm" : \{[^\}]*\},/"slm" : $body.$_path,/] // TESTRESPONSE[s/"health_api" : \{[^\}]*\}\s*\}/"health_api" : $body.$_path/] +// TESTRESPONSE[s/"data_streams" : \{[^\}]*\},/"data_streams" : $body.$_path,/] // TESTRESPONSE[s/ : true/ : $body.$_path/] // TESTRESPONSE[s/ : false/ : $body.$_path/] // TESTRESPONSE[s/ : (\-)?[0-9]+/ : $body.$_path/] @@ -519,3 +520,4 @@ GET /_xpack/usage // 5. All of the numbers and strings on the right hand side of *every* field in // the response are ignored. 
So we're really only asserting things about the // the shape of this response, not the values in it. +// 6. Ignore the contents of data streams until the failure store is tech preview. diff --git a/docs/reference/synonyms/apis/put-synonyms-set.asciidoc b/docs/reference/synonyms/apis/put-synonyms-set.asciidoc index 5651c4c99adcd..101413ece38cb 100644 --- a/docs/reference/synonyms/apis/put-synonyms-set.asciidoc +++ b/docs/reference/synonyms/apis/put-synonyms-set.asciidoc @@ -7,6 +7,10 @@ Creates or updates a synonyms set. +NOTE: Synonyms sets are limited to a maximum of 10,000 synonym rules per set. +Synonym sets with more than 10,000 synonym rules will provide inconsistent search results. +If you need to manage more synonym rules, you can create multiple synonyms sets. + [[put-synonyms-set-request]] ==== {api-request-title} diff --git a/docs/reference/synonyms/apis/synonyms-apis.asciidoc b/docs/reference/synonyms/apis/synonyms-apis.asciidoc index 9b92ba8e8579d..2275219e66445 100644 --- a/docs/reference/synonyms/apis/synonyms-apis.asciidoc +++ b/docs/reference/synonyms/apis/synonyms-apis.asciidoc @@ -18,6 +18,10 @@ This provides an alternative to: Synonyms sets can be used to configure <> and <>. These filters are applied as part of the <> process by the <>. +NOTE: Synonyms sets are limited to a maximum of 10,000 synonym rules per set. +Synonym sets with more than 10,000 synonym rules will provide inconsistent search results. +If you need to manage more synonym rules, you can create multiple synonyms sets. 
+ [discrete] [[synonyms-sets-apis]] === Synonyms sets APIs diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index c92ebaca86a57..4b49e191f4e16 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -216,18 +216,19 @@ unassigned. See <>. If a node containing a primary shard is lost, {es} can typically replace it using a replica on another node. If you can't recover the node and replicas -don't exist or are irrecoverable, you'll need to re-add the missing data from a -<> or the original data source. +don't exist or are irrecoverable, <> will report `no_valid_shard_copy` and you'll need to do one of the following: +* restore the missing data from <> +* index the missing data from its original data source +* accept data loss on the index-level by running <> +* accept data loss on the shard-level by executing <> allocate_stale_primary or allocate_empty_primary command with `accept_data_loss: true` ++ WARNING: Only use this option if node recovery is no longer possible. This process allocates an empty primary shard. If the node later rejoins the cluster, {es} will overwrite its primary shard with data from this newer empty shard, resulting in data loss. - -Use the <> to manually allocate the -unassigned primary shard to another data node in the same tier. Set -`accept_data_loss` to `true`. - ++ [source,console] ---- POST _cluster/reroute?metric=none @@ -246,7 +247,3 @@ POST _cluster/reroute?metric=none ---- // TEST[s/^/PUT my-index\n/] // TEST[catch:bad_request] - -If you backed up the missing index data to a snapshot, use the -<> to restore the individual index. -Alternatively, you can index the missing data from the original data source. 
diff --git a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaKernel32Library.java b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaKernel32Library.java index 06704aaa30056..0bfdf959f7b58 100644 --- a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaKernel32Library.java +++ b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaKernel32Library.java @@ -13,8 +13,10 @@ import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import com.sun.jna.Structure; +import com.sun.jna.WString; import com.sun.jna.win32.StdCallLibrary; +import org.elasticsearch.nativeaccess.WindowsFunctions.ConsoleCtrlHandler; import org.elasticsearch.nativeaccess.lib.Kernel32Library; import java.util.List; @@ -96,6 +98,22 @@ public long Type() { } } + /** + * JNA adaptation of {@link ConsoleCtrlHandler} + */ + public static class NativeHandlerCallback implements StdCallLibrary.StdCallCallback { + + private final ConsoleCtrlHandler handler; + + public NativeHandlerCallback(ConsoleCtrlHandler handler) { + this.handler = handler; + } + + public boolean callback(long dwCtrlType) { + return handler.handle((int) dwCtrlType); + } + } + private interface NativeFunctions extends StdCallLibrary { Pointer GetCurrentProcess(); @@ -106,9 +124,14 @@ private interface NativeFunctions extends StdCallLibrary { int VirtualQueryEx(Pointer handle, Pointer address, JnaMemoryBasicInformation memoryInfo, int length); boolean SetProcessWorkingSetSize(Pointer handle, SizeT minSize, SizeT maxSize); + + int GetShortPathNameW(WString lpszLongPath, char[] lpszShortPath, int cchBuffer); + + boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); } private final NativeFunctions functions; + private NativeHandlerCallback consoleCtrlHandlerCallback = null; JnaKernel32Library() { this.functions = Native.load("kernel32", NativeFunctions.class); @@ -161,4 +184,17 @@ public boolean SetProcessWorkingSetSize(Handle handle, long 
minSize, long maxSiz var jnaHandle = (JnaHandle) handle; return functions.SetProcessWorkingSetSize(jnaHandle.pointer, new SizeT(minSize), new SizeT(maxSize)); } + + @Override + public int GetShortPathNameW(String lpszLongPath, char[] lpszShortPath, int cchBuffer) { + var wideFileName = new WString(lpszLongPath); + return functions.GetShortPathNameW(wideFileName, lpszShortPath, cchBuffer); + } + + @Override + public boolean SetConsoleCtrlHandler(ConsoleCtrlHandler handler, boolean add) { + assert consoleCtrlHandlerCallback == null; + consoleCtrlHandlerCallback = new NativeHandlerCallback(handler); + return functions.SetConsoleCtrlHandler(consoleCtrlHandlerCallback, true); + } } diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 7a545787bbdae..b7e6a1c704e6e 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.8" +var vecVersion = "1.0.9" repositories { exclusiveContent { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java index e8c7ab99143db..7f91d0425af47 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java @@ -52,6 +52,13 @@ static NativeAccess instance() { */ Zstd getZstd(); + /** + * Returns an accessor for native functions only available on Windows, or {@code null} if not on Windows. + */ + default WindowsFunctions getWindowsFunctions() { + return null; + } + /* * Returns the vector similarity functions, or an empty optional. 
*/ diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsFunctions.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsFunctions.java new file mode 100644 index 0000000000000..c57109678a0b8 --- /dev/null +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsFunctions.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.nativeaccess; + +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.nativeaccess.lib.Kernel32Library; + +/** + * Native functions specific to the Windows operating system. + */ +public class WindowsFunctions { + private static final Logger logger = LogManager.getLogger(Systemd.class); + + private final Kernel32Library kernel; + + WindowsFunctions(Kernel32Library kernel) { + this.kernel = kernel; + } + + /** + * Retrieves the short path form of the specified path. 
+ * + * @param path the path + * @return the short path name, or the original path name if unsupported or unavailable + */ + public String getShortPathName(String path) { + String longPath = "\\\\?\\" + path; + // first we get the length of the buffer needed + final int length = kernel.GetShortPathNameW(longPath, null, 0); + if (length == 0) { + logger.warn("failed to get short path name: {}", kernel.GetLastError()); + return path; + } + final char[] shortPath = new char[length]; + // knowing the length of the buffer, now we get the short name + if (kernel.GetShortPathNameW(longPath, shortPath, length) > 0) { + assert shortPath[length - 1] == '\0'; + return new String(shortPath, 0, length - 1); + } else { + logger.warn("failed to get short path name: {}", kernel.GetLastError()); + return path; + } + } + + /** + * Adds a Console Ctrl Handler for Windows. On non-windows this is a noop. + * + * @return true if the handler is correctly set + */ + public boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + return kernel.SetConsoleCtrlHandler(dwCtrlType -> { + if (logger.isDebugEnabled()) { + logger.debug("console control handler received event [{}]", dwCtrlType); + } + return handler.handle(dwCtrlType); + }, true); + } + + /** + * Windows callback for console events + * + * @see HandlerRoutine docs + */ + public interface ConsoleCtrlHandler { + + int CTRL_CLOSE_EVENT = 2; + + /** + * Handles the Ctrl event. + * + * @param code the code corresponding to the Ctrl sent. + * @return true if the handler processed the event, false otherwise. If false, the next handler will be called. 
+ */ + boolean handle(int code); + } +} diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsNativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsNativeAccess.java index 431dc4f3d6e64..843cc73fbed02 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsNativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/WindowsNativeAccess.java @@ -28,10 +28,12 @@ class WindowsNativeAccess extends AbstractNativeAccess { public static final int MEM_COMMIT = 0x1000; private final Kernel32Library kernel; + private final WindowsFunctions windowsFunctions; WindowsNativeAccess(NativeLibraryProvider libraryProvider) { super("Windows", libraryProvider); this.kernel = libraryProvider.getLibrary(Kernel32Library.class); + this.windowsFunctions = new WindowsFunctions(kernel); } @Override @@ -71,6 +73,11 @@ public ProcessLimits getProcessLimits() { return new ProcessLimits(ProcessLimits.UNKNOWN, ProcessLimits.UNKNOWN, ProcessLimits.UNKNOWN); } + @Override + public WindowsFunctions getWindowsFunctions() { + return windowsFunctions; + } + @Override public Optional getVectorSimilarityFunctions() { return Optional.empty(); // not supported yet diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/Kernel32Library.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/Kernel32Library.java index 43dab17b82ff8..43337f4532bed 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/Kernel32Library.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/Kernel32Library.java @@ -8,6 +8,8 @@ package org.elasticsearch.nativeaccess.lib; +import org.elasticsearch.nativeaccess.WindowsFunctions.ConsoleCtrlHandler; + public non-sealed interface Kernel32Library extends NativeLibrary { interface Handle {} @@ -78,4 +80,25 @@ interface MemoryBasicInformation { * @see SetProcessWorkingSetSize docs */ boolean SetProcessWorkingSetSize(Handle handle, 
long minSize, long maxSize); + + /** + * Retrieves the short path form of the specified path. + * + * @param lpszLongPath the path string + * @param lpszShortPath a buffer to receive the short name + * @param cchBuffer the size of the buffer + * @return the length of the string copied into {@code lpszShortPath}, otherwise zero for failure + * @see GetShortPathName docs + */ + int GetShortPathNameW(String lpszLongPath, char[] lpszShortPath, int cchBuffer); + + /** + * Native call to the Kernel32 API to set a new Console Ctrl Handler. + * + * @param handler A callback to handle control events + * @param add True if the handler should be added, false if it should replace existing handlers + * @return true if the handler is correctly set + * @see SetConsoleCtrlHandler docs + */ + boolean SetConsoleCtrlHandler(ConsoleCtrlHandler handler, boolean add); } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java new file mode 100644 index 0000000000000..0724386cca22c --- /dev/null +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.nativeaccess.jdk; + +import java.lang.foreign.Arena; +import java.lang.foreign.MemoryLayout; +import java.lang.foreign.MemorySegment; +import java.nio.charset.Charset; + +import static java.lang.foreign.ValueLayout.JAVA_BYTE; + +/** + * Utility methods to act on Arena apis which have changed in subsequent JDK releases. + */ +class ArenaUtil { + + /** + * Allocate an array of the given memory layout. 
+ */ + static MemorySegment allocate(Arena arena, MemoryLayout layout, int count) { + return arena.allocateArray(layout, count); + } + + /** + * Allocate and copy the given string into native memory. + */ + static MemorySegment allocateFrom(Arena arena, String str, Charset charset) { + return arena.allocateArray(JAVA_BYTE, str.getBytes(charset)); + } + + private ArenaUtil() {} +} diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkKernel32Library.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkKernel32Library.java index 66eba5cd8dad6..bbfd26bd061d0 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkKernel32Library.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkKernel32Library.java @@ -8,6 +8,7 @@ package org.elasticsearch.nativeaccess.jdk; +import org.elasticsearch.nativeaccess.WindowsFunctions.ConsoleCtrlHandler; import org.elasticsearch.nativeaccess.lib.Kernel32Library; import java.lang.foreign.Arena; @@ -18,14 +19,18 @@ import java.lang.foreign.StructLayout; import java.lang.invoke.MethodHandle; import java.lang.invoke.VarHandle; +import java.nio.charset.StandardCharsets; import static java.lang.foreign.MemoryLayout.PathElement.groupElement; import static java.lang.foreign.MemoryLayout.paddingLayout; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_BOOLEAN; +import static java.lang.foreign.ValueLayout.JAVA_CHAR; import static java.lang.foreign.ValueLayout.JAVA_INT; import static java.lang.foreign.ValueLayout.JAVA_LONG; import static org.elasticsearch.nativeaccess.jdk.LinkerHelper.downcallHandle; +import static org.elasticsearch.nativeaccess.jdk.LinkerHelper.upcallHandle; +import static org.elasticsearch.nativeaccess.jdk.LinkerHelper.upcallStub; import static org.elasticsearch.nativeaccess.jdk.MemorySegmentUtil.varHandleWithoutOffset; class JdkKernel32Library implements Kernel32Library { @@ -52,6 +57,21 
@@ class JdkKernel32Library implements Kernel32Library { "SetProcessWorkingSetSize", FunctionDescriptor.of(ADDRESS, JAVA_LONG, JAVA_LONG) ); + private static final MethodHandle GetShortPathNameW$mh = downcallHandleWithError( + "GetShortPathNameW", + FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT) + ); + private static final MethodHandle SetConsoleCtrlHandler$mh = downcallHandleWithError( + "SetConsoleCtrlHandler", + FunctionDescriptor.of(JAVA_BOOLEAN, ADDRESS, JAVA_BOOLEAN) + ); + + private static final FunctionDescriptor ConsoleCtrlHandler_handle$fd = FunctionDescriptor.of(JAVA_BOOLEAN, JAVA_INT); + private static final MethodHandle ConsoleCtrlHandler_handle$mh = upcallHandle( + ConsoleCtrlHandler.class, + "handle", + ConsoleCtrlHandler_handle$fd + ); private static MethodHandle downcallHandleWithError(String function, FunctionDescriptor functionDescriptor) { return downcallHandle(function, functionDescriptor, CAPTURE_GETLASTERROR_OPTION); @@ -208,4 +228,38 @@ public boolean SetProcessWorkingSetSize(Handle process, long minSize, long maxSi throw new AssertionError(t); } } + + @Override + public int GetShortPathNameW(String lpszLongPath, char[] lpszShortPath, int cchBuffer) { + try (Arena arena = Arena.ofConfined()) { + MemorySegment wideFileName = ArenaUtil.allocateFrom(arena, lpszLongPath + "\0", StandardCharsets.UTF_16LE); + MemorySegment shortPath; + if (lpszShortPath != null) { + shortPath = ArenaUtil.allocate(arena, JAVA_CHAR, cchBuffer); + } else { + shortPath = MemorySegment.NULL; + } + + int ret = (int) GetShortPathNameW$mh.invokeExact(lastErrorState, wideFileName, shortPath, cchBuffer); + if (shortPath != MemorySegment.NULL) { + for (int i = 0; i < cchBuffer; ++i) { + lpszShortPath[i] = shortPath.getAtIndex(JAVA_CHAR, i); + } + } + return ret; + } catch (Throwable t) { + throw new AssertionError(t); + } + } + + @Override + public boolean SetConsoleCtrlHandler(ConsoleCtrlHandler handler, boolean add) { + // use the global arena so the handler 
will have the lifetime of the jvm + MemorySegment nativeHandler = upcallStub(ConsoleCtrlHandler_handle$mh, handler, ConsoleCtrlHandler_handle$fd, Arena.global()); + try { + return (boolean) SetConsoleCtrlHandler$mh.invokeExact(lastErrorState, nativeHandler, add); + } catch (Throwable t) { + throw new AssertionError(t); + } + } } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 0af87154960ad..c34c8c070edc5 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -16,9 +16,13 @@ import java.lang.foreign.FunctionDescriptor; import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; +import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -61,17 +65,36 @@ static List findLibSystemd() { // so we must manually check the library path to find what we need. 
final Path libsystemd = Paths.get("libsystemd.so.0"); final String libpath = System.getProperty("java.library.path"); - return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + final List foundPaths = new ArrayList<>(); + Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).forEach(rootPath -> { try { - return Files.find( - p, - Integer.MAX_VALUE, - (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) - ); + Files.walkFileTree(rootPath, new SimpleFileVisitor<>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { + if (Files.isReadable(dir)) { + return FileVisitResult.CONTINUE; + } + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { + if (file.getFileName().equals(libsystemd)) { + foundPaths.add(file.toAbsolutePath().toString()); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFileFailed(Path file, IOException exc) { + return FileVisitResult.CONTINUE; + } + }); } catch (IOException e) { throw new UncheckedIOException(e); } - }).map(p -> p.toAbsolutePath().toString()).toList(); + }); + return foundPaths; } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/LinkerHelper.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/LinkerHelper.java index c0224efb0ae9e..2f13cb1324e56 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/LinkerHelper.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/LinkerHelper.java @@ -22,7 +22,7 @@ class LinkerHelper { private static final Linker LINKER = Linker.nativeLinker(); private static final SymbolLookup SYMBOL_LOOKUP; - private static final MethodHandles.Lookup MH_LOOKUP 
= MethodHandles.publicLookup(); + private static final MethodHandles.Lookup MH_LOOKUP = MethodHandles.lookup(); static { // We first check the loader lookup, which contains libs loaded by System.load and System.loadLibrary. diff --git a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java new file mode 100644 index 0000000000000..387473e23a561 --- /dev/null +++ b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/ArenaUtil.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.nativeaccess.jdk; + +import java.lang.foreign.Arena; +import java.lang.foreign.MemoryLayout; +import java.lang.foreign.MemorySegment; +import java.nio.charset.Charset; + +public class ArenaUtil { + + /** + * Allocate an array of the given memory layout. + */ + static MemorySegment allocate(Arena arena, MemoryLayout layout, int count) { + return arena.allocate(layout, count); + } + + /** + * Allocate and copy the given string into native memory. 
+ */ + static MemorySegment allocateFrom(Arena arena, String str, Charset charset) { + return arena.allocateFrom(str, charset); + } + + private ArenaUtil() {} +} diff --git a/libs/vec/native/build.gradle b/libs/vec/native/build.gradle index 7edf46d406862..ef9120680646a 100644 --- a/libs/vec/native/build.gradle +++ b/libs/vec/native/build.gradle @@ -62,6 +62,10 @@ model { } } clang(Clang) { + target("aarch64") { + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=armv8-a"]) } + } + target("amd64") { cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } } diff --git a/libs/vec/native/publish_vec_binaries.sh b/libs/vec/native/publish_vec_binaries.sh index 2ed6c750ab9e8..d11645ff71c4a 100755 --- a/libs/vec/native/publish_vec_binaries.sh +++ b/libs/vec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.8" +VERSION="1.0.9" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -29,14 +29,16 @@ if curl -sS -I --fail --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/ve fi echo 'Building Darwin binary...' -./gradlew --quiet --console=plain vecAarch64SharedLibrary +./gradlew --quiet --console=plain clean vecAarch64SharedLibrary echo 'Building Linux binary...' +mkdir -p build/libs/vec/shared/aarch64/ DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so echo 'Building Linux x64 binary...' DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) 
+mkdir -p build/libs/vec/shared/amd64 docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so mkdir -p $TEMP/darwin-aarch64 diff --git a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorer.java b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorer.java deleted file mode 100644 index 42165fe9f5905..0000000000000 --- a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorer.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.vec; - -import java.io.IOException; - -/** A scorer of vectors. */ -public interface VectorScorer { - - /** Computes the score of the vectors at the given ordinals. */ - float score(int firstOrd, int secondOrd) throws IOException; - - /** The per-vector dimension size. */ - int dims(); - - /** The maximum ordinal of vector this scorer can score. 
*/ - int maxOrd(); - - VectorScorer copy(); - -} diff --git a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactory.java b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactory.java index 852ee341e80c2..ad7f467da9d2a 100644 --- a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactory.java +++ b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactory.java @@ -9,6 +9,8 @@ package org.elasticsearch.vec; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import java.util.Optional; @@ -23,20 +25,18 @@ static Optional instance() { * Returns an optional containing an int7 scalar quantized vector scorer for * the given parameters, or an empty optional if a scorer is not supported. * - * @param dims the vector dimensions - * @param maxOrd the ordinal of the largest vector accessible - * @param scoreCorrectionConstant the score correction constant * @param similarityType the similarity type - * @param indexInput the index input containing the vector data; + * @param input the index input containing the vector data; * offset of the first vector is 0, * the length must be (maxOrd + Float#BYTES) * dims - * @return an optional containing the vector scorer, or empty + * @param values the random access vector values + * @param scoreCorrectionConstant the score correction constant + * @return an optional containing the vector scorer supplier, or empty */ - Optional getInt7ScalarQuantizedVectorScorer( - int dims, - int maxOrd, - float scoreCorrectionConstant, + Optional getInt7ScalarQuantizedVectorScorer( VectorSimilarityType similarityType, - IndexInput indexInput + IndexInput input, + RandomAccessQuantizedByteVectorValues values, + float scoreCorrectionConstant ); } diff --git a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java 
b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java index e2181a2d499cc..0b8231770490b 100644 --- a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java +++ b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java @@ -9,6 +9,8 @@ package org.elasticsearch.vec; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import java.util.Optional; @@ -17,12 +19,11 @@ class VectorScorerFactoryImpl implements VectorScorerFactory { static final VectorScorerFactoryImpl INSTANCE = null; @Override - public Optional getInt7ScalarQuantizedVectorScorer( - int dims, - int maxOrd, - float scoreCorrectionConstant, + public Optional getInt7ScalarQuantizedVectorScorer( VectorSimilarityType similarityType, - IndexInput input + IndexInput input, + RandomAccessQuantizedByteVectorValues values, + float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); } diff --git a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerSupplierAdapter.java b/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerSupplierAdapter.java deleted file mode 100644 index ed585d7846530..0000000000000 --- a/libs/vec/src/main/java/org/elasticsearch/vec/VectorScorerSupplierAdapter.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.vec; - -import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; - -import java.io.IOException; - -/** An adapter between VectorScorer and RandomVectorScorerSupplier. */ -public final class VectorScorerSupplierAdapter implements RandomVectorScorerSupplier { - - private final VectorScorer scorer; - - public VectorScorerSupplierAdapter(VectorScorer scorer) { - this.scorer = scorer; - } - - @Override - public RandomVectorScorer scorer(int ord) throws IOException { - return new RandomVectorScorer() { - final int firstOrd = ord; - - @Override - public float score(int otherOrd) throws IOException { - return scorer.score(firstOrd, otherOrd); - } - - @Override - public int maxOrd() { - return scorer.maxOrd(); - } - }; - } - - @Override - public RandomVectorScorerSupplier copy() throws IOException { - return new VectorScorerSupplierAdapter(scorer.copy()); - } -} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java b/libs/vec/src/main21/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java index f0421a682fc95..a38cf1a627b3a 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/VectorScorerFactoryImpl.java @@ -9,11 +9,13 @@ package org.elasticsearch.vec; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.vec.internal.IndexInputUtils; -import org.elasticsearch.vec.internal.Int7DotProduct; -import org.elasticsearch.vec.internal.Int7Euclidean; -import org.elasticsearch.vec.internal.Int7MaximumInnerProduct; +import org.elasticsearch.vec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; +import 
org.elasticsearch.vec.internal.Int7SQVectorScorerSupplier.EuclideanSupplier; +import org.elasticsearch.vec.internal.Int7SQVectorScorerSupplier.MaxInnerProductSupplier; import java.util.Optional; @@ -28,21 +30,27 @@ private VectorScorerFactoryImpl() {} } @Override - public Optional getInt7ScalarQuantizedVectorScorer( - int dims, - int maxOrd, - float scoreCorrectionConstant, + public Optional getInt7ScalarQuantizedVectorScorer( VectorSimilarityType similarityType, - IndexInput input + IndexInput input, + RandomAccessQuantizedByteVectorValues values, + float scoreCorrectionConstant ) { input = IndexInputUtils.unwrapAndCheckInputOrNull(input); if (input == null) { return Optional.empty(); // the input type is not MemorySegment based } - return Optional.of(switch (similarityType) { - case COSINE, DOT_PRODUCT -> new Int7DotProduct(dims, maxOrd, scoreCorrectionConstant, input); - case EUCLIDEAN -> new Int7Euclidean(dims, maxOrd, scoreCorrectionConstant, input); - case MAXIMUM_INNER_PRODUCT -> new Int7MaximumInnerProduct(dims, maxOrd, scoreCorrectionConstant, input); - }); + checkInvariants(values.size(), values.dimension(), input); + return switch (similarityType) { + case COSINE, DOT_PRODUCT -> Optional.of(new DotProductSupplier(input, values, scoreCorrectionConstant)); + case EUCLIDEAN -> Optional.of(new EuclideanSupplier(input, values, scoreCorrectionConstant)); + case MAXIMUM_INNER_PRODUCT -> Optional.of(new MaxInnerProductSupplier(input, values, scoreCorrectionConstant)); + }; + } + + static void checkInvariants(int maxOrd, int vectorByteLength, IndexInput input) { + if (input.length() < (long) vectorByteLength * maxOrd) { + throw new IllegalArgumentException("input length is less than expected vector data"); + } } } diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java deleted file mode 100644 index 
2be0aa53f7c57..0000000000000 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.vec.internal; - -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; -import org.elasticsearch.nativeaccess.NativeAccess; -import org.elasticsearch.nativeaccess.VectorSimilarityFunctions; -import org.elasticsearch.vec.VectorScorer; - -import java.io.IOException; -import java.lang.foreign.MemorySegment; -import java.lang.invoke.MethodHandle; - -abstract sealed class AbstractInt7ScalarQuantizedVectorScorer implements VectorScorer permits Int7DotProduct, Int7Euclidean, - Int7MaximumInnerProduct { - - static final VectorSimilarityFunctions DISTANCE_FUNCS = NativeAccess.instance() - .getVectorSimilarityFunctions() - .orElseThrow(AssertionError::new); - - protected final int dims; - protected final int maxOrd; - protected final float scoreCorrectionConstant; - protected final IndexInput input; - protected final MemorySegment segment; - protected final MemorySegment[] segments; - protected final long offset; - protected final int chunkSizePower; - protected final long chunkSizeMask; - - private final ScalarQuantizedVectorSimilarity fallbackScorer; - - protected AbstractInt7ScalarQuantizedVectorScorer( - int dims, - int maxOrd, - float scoreCorrectionConstant, - IndexInput input, - ScalarQuantizedVectorSimilarity fallbackScorer - ) { - this.dims = dims; - this.maxOrd = maxOrd; - this.scoreCorrectionConstant = scoreCorrectionConstant; - this.input = input; - 
this.fallbackScorer = fallbackScorer; - - this.segments = IndexInputUtils.segmentArray(input); - if (segments.length == 1) { - segment = segments[0]; - offset = 0L; - } else { - segment = null; - offset = IndexInputUtils.offset(input); - } - this.chunkSizePower = IndexInputUtils.chunkSizePower(input); - this.chunkSizeMask = IndexInputUtils.chunkSizeMask(input); - } - - @Override - public final int dims() { - return dims; - } - - @Override - public final int maxOrd() { - return maxOrd; - } - - protected final void checkOrdinal(int ord) { - if (ord < 0 || ord > maxOrd) { - throw new IllegalArgumentException("illegal ordinal: " + ord); - } - } - - protected final float fallbackScore(long firstByteOffset, long secondByteOffset) throws IOException { - input.seek(firstByteOffset); - byte[] a = new byte[dims]; - input.readBytes(a, 0, a.length); - float aOffsetValue = Float.intBitsToFloat(input.readInt()); - - input.seek(secondByteOffset); - byte[] b = new byte[dims]; - input.readBytes(b, 0, a.length); - float bOffsetValue = Float.intBitsToFloat(input.readInt()); - - return fallbackScorer.score(a, aOffsetValue, b, bOffsetValue); - } - - protected final MemorySegment segmentSlice(long pos, int length) { - if (segment != null) { - // single - if (checkIndex(pos, segment.byteSize() + 1)) { - return segment.asSlice(pos, length); - } - } else { - // multi - pos = pos + this.offset; - final int si = (int) (pos >> chunkSizePower); - final MemorySegment seg = segments[si]; - long offset = pos & chunkSizeMask; - if (checkIndex(offset + length, seg.byteSize() + 1)) { - return seg.asSlice(offset, length); - } - } - return null; - } - - static boolean checkIndex(long index, long length) { - return index >= 0 && index < length; - } - - static final MethodHandle DOT_PRODUCT_7U = DISTANCE_FUNCS.dotProductHandle7u(); - static final MethodHandle SQUARE_DISTANCE_7U = DISTANCE_FUNCS.squareDistanceHandle7u(); - - static int dotProduct7u(MemorySegment a, MemorySegment b, int length) { - // 
assert assertSegments(a, b, length); - try { - return (int) DOT_PRODUCT_7U.invokeExact(a, b, length); - } catch (Throwable e) { - if (e instanceof Error err) { - throw err; - } else if (e instanceof RuntimeException re) { - throw re; - } else { - throw new RuntimeException(e); - } - } - } - - static int squareDistance7u(MemorySegment a, MemorySegment b, int length) { - // assert assertSegments(a, b, length); - try { - return (int) SQUARE_DISTANCE_7U.invokeExact(a, b, length); - } catch (Throwable e) { - if (e instanceof Error err) { - throw err; - } else if (e instanceof RuntimeException re) { - throw re; - } else { - throw new RuntimeException(e); - } - } - } - - static boolean assertSegments(MemorySegment a, MemorySegment b, int length) { - return a.isNative() && a.byteSize() >= length && b.isNative() && b.byteSize() >= length; - } -} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java deleted file mode 100644 index 16be864cb8d92..0000000000000 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.vec.internal; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; - -import java.io.IOException; -import java.lang.foreign.MemorySegment; - -// Scalar Quantized vectors are inherently byte sized, so dims is equal to the length in bytes. 
-public final class Int7DotProduct extends AbstractInt7ScalarQuantizedVectorScorer { - - public Int7DotProduct(int dims, int maxOrd, float scoreCorrectionConstant, IndexInput input) { - super( - dims, - maxOrd, - scoreCorrectionConstant, - input, - ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.DOT_PRODUCT, scoreCorrectionConstant) - ); - } - - @Override - public float score(int firstOrd, int secondOrd) throws IOException { - checkOrdinal(firstOrd); - checkOrdinal(secondOrd); - - final int length = dims; - long firstByteOffset = (long) firstOrd * (length + Float.BYTES); - long secondByteOffset = (long) secondOrd * (length + Float.BYTES); - - MemorySegment firstSeg = segmentSlice(firstByteOffset, length); - input.seek(firstByteOffset + length); - float firstOffset = Float.intBitsToFloat(input.readInt()); - - MemorySegment secondSeg = segmentSlice(secondByteOffset, length); - input.seek(secondByteOffset + length); - float secondOffset = Float.intBitsToFloat(input.readInt()); - - if (firstSeg != null && secondSeg != null) { - int dotProduct = dotProduct7u(firstSeg, secondSeg, length); - assert dotProduct >= 0; - float adjustedDistance = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset; - return Math.max((1 + adjustedDistance) / 2, 0f); - } else { - return Math.max(fallbackScore(firstByteOffset, secondByteOffset), 0f); - } - } - - @Override - public Int7DotProduct copy() { - return new Int7DotProduct(dims, maxOrd, scoreCorrectionConstant, input.clone()); - } -} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java deleted file mode 100644 index 3bed20b70e494..0000000000000 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.vec.internal; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; - -import java.io.IOException; -import java.lang.foreign.MemorySegment; - -// Scalar Quantized vectors are inherently bytes. -public final class Int7Euclidean extends AbstractInt7ScalarQuantizedVectorScorer { - - public Int7Euclidean(int dims, int maxOrd, float scoreCorrectionConstant, IndexInput input) { - super( - dims, - maxOrd, - scoreCorrectionConstant, - input, - ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.EUCLIDEAN, scoreCorrectionConstant) - ); - } - - @Override - public float score(int firstOrd, int secondOrd) throws IOException { - checkOrdinal(firstOrd); - checkOrdinal(secondOrd); - - final int length = dims; - long firstByteOffset = (long) firstOrd * (length + Float.BYTES); - long secondByteOffset = (long) secondOrd * (length + Float.BYTES); - - MemorySegment firstSeg = segmentSlice(firstByteOffset, length); - MemorySegment secondSeg = segmentSlice(secondByteOffset, length); - - if (firstSeg != null && secondSeg != null) { - int squareDistance = squareDistance7u(firstSeg, secondSeg, length); - float adjustedDistance = squareDistance * scoreCorrectionConstant; - return 1 / (1f + adjustedDistance); - } else { - return fallbackScore(firstByteOffset, secondByteOffset); - } - } - - @Override - public Int7Euclidean copy() { - return new Int7Euclidean(dims, maxOrd, scoreCorrectionConstant, input.clone()); - } -} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java 
b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java deleted file mode 100644 index e6045f951cac3..0000000000000 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.vec.internal; - -import org.apache.lucene.index.VectorSimilarityFunction; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; - -import java.io.IOException; -import java.lang.foreign.MemorySegment; - -// Scalar Quantized vectors are inherently bytes. -public final class Int7MaximumInnerProduct extends AbstractInt7ScalarQuantizedVectorScorer { - - public Int7MaximumInnerProduct(int dims, int maxOrd, float scoreCorrectionConstant, IndexInput input) { - super( - dims, - maxOrd, - scoreCorrectionConstant, - input, - ScalarQuantizedVectorSimilarity.fromVectorSimilarity(VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant) - ); - } - - @Override - public float score(int firstOrd, int secondOrd) throws IOException { - checkOrdinal(firstOrd); - checkOrdinal(secondOrd); - - final int length = dims; - long firstByteOffset = (long) firstOrd * (length + Float.BYTES); - long secondByteOffset = (long) secondOrd * (length + Float.BYTES); - - MemorySegment firstSeg = segmentSlice(firstByteOffset, length); - input.seek(firstByteOffset + length); - float firstOffset = Float.intBitsToFloat(input.readInt()); - - MemorySegment secondSeg = segmentSlice(secondByteOffset, length); - input.seek(secondByteOffset + length); - float secondOffset = 
Float.intBitsToFloat(input.readInt()); - - if (firstSeg != null && secondSeg != null) { - int dotProduct = dotProduct7u(firstSeg, secondSeg, length); - float adjustedDistance = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset; - return scaleMaxInnerProductScore(adjustedDistance); - } else { - return fallbackScore(firstByteOffset, secondByteOffset); - } - } - - /** - * Returns a scaled score preventing negative scores for maximum-inner-product - * @param rawSimilarity the raw similarity between two vectors - */ - static float scaleMaxInnerProductScore(float rawSimilarity) { - if (rawSimilarity < 0) { - return 1 / (1 + -1 * rawSimilarity); - } - return rawSimilarity + 1; - } - - @Override - public Int7MaximumInnerProduct copy() { - return new Int7MaximumInnerProduct(dims, maxOrd, scoreCorrectionConstant, input.clone()); - } -} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7SQVectorScorerSupplier.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7SQVectorScorerSupplier.java new file mode 100644 index 0000000000000..f9df0843ac0af --- /dev/null +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7SQVectorScorerSupplier.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.vec.internal; + +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; + +import java.io.IOException; +import java.lang.foreign.MemorySegment; + +import static org.apache.lucene.index.VectorSimilarityFunction.DOT_PRODUCT; +import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; +import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; +import static org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity.fromVectorSimilarity; + +public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorScorerSupplier { + + final int dims; + final int maxOrd; + final float scoreCorrectionConstant; + final IndexInput input; + final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final ScalarQuantizedVectorSimilarity fallbackScorer; + + final MemorySegment segment; + final MemorySegment[] segments; + final long offset; + final int chunkSizePower; + final long chunkSizeMask; + + protected Int7SQVectorScorerSupplier( + IndexInput input, + RandomAccessQuantizedByteVectorValues values, + float scoreCorrectionConstant, + ScalarQuantizedVectorSimilarity fallbackScorer + ) { + this.input = input; + this.values = values; + this.dims = values.dimension(); + this.maxOrd = values.size(); + this.scoreCorrectionConstant = scoreCorrectionConstant; + this.fallbackScorer = fallbackScorer; + + this.segments = IndexInputUtils.segmentArray(input); + if (segments.length == 1) { + segment = segments[0]; + offset = 0L; + } else { + segment = null; + offset = IndexInputUtils.offset(input); + } + this.chunkSizePower = IndexInputUtils.chunkSizePower(input); + this.chunkSizeMask = 
IndexInputUtils.chunkSizeMask(input); + } + + protected final void checkOrdinal(int ord) { + if (ord < 0 || ord > maxOrd) { + throw new IllegalArgumentException("illegal ordinal: " + ord); + } + } + + final float scoreFromOrds(int firstOrd, int secondOrd) throws IOException { + checkOrdinal(firstOrd); + checkOrdinal(secondOrd); + + final int length = dims; + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); + + MemorySegment firstSeg = segmentSlice(firstByteOffset, length); + if (firstSeg == null) { + return fallbackScore(firstByteOffset, secondByteOffset); + } + input.seek(firstByteOffset + length); + float firstOffset = Float.intBitsToFloat(input.readInt()); + + MemorySegment secondSeg = segmentSlice(secondByteOffset, length); + if (secondSeg == null) { + return fallbackScore(firstByteOffset, secondByteOffset); + } + input.seek(secondByteOffset + length); + float secondOffset = Float.intBitsToFloat(input.readInt()); + + return scoreFromSegments(firstSeg, firstOffset, secondSeg, secondOffset); + } + + abstract float scoreFromSegments(MemorySegment a, float aOffset, MemorySegment b, float bOffset); + + protected final float fallbackScore(long firstByteOffset, long secondByteOffset) throws IOException { + input.seek(firstByteOffset); + byte[] a = new byte[dims]; + input.readBytes(a, 0, a.length); + float aOffsetValue = Float.intBitsToFloat(input.readInt()); + + input.seek(secondByteOffset); + byte[] b = new byte[dims]; + input.readBytes(b, 0, a.length); + float bOffsetValue = Float.intBitsToFloat(input.readInt()); + + return fallbackScorer.score(a, aOffsetValue, b, bOffsetValue); + } + + @Override + public RandomVectorScorer scorer(int ord) { + checkOrdinal(ord); + return new RandomVectorScorer.AbstractRandomVectorScorer<>(values) { + @Override + public float score(int node) throws IOException { + return scoreFromOrds(ord, node); + } + }; + } + + protected final MemorySegment 
segmentSlice(long pos, int length) { + if (segment != null) { + // single + if (checkIndex(pos, segment.byteSize() + 1)) { + return segment.asSlice(pos, length); + } + } else { + // multi + pos = pos + this.offset; + final int si = (int) (pos >> chunkSizePower); + final MemorySegment seg = segments[si]; + long offset = pos & chunkSizeMask; + if (checkIndex(offset + length, seg.byteSize() + 1)) { + return seg.asSlice(offset, length); + } + } + return null; + } + + public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { + + public EuclideanSupplier(IndexInput input, RandomAccessQuantizedByteVectorValues values, float scoreCorrectionConstant) { + super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant)); + } + + @Override + float scoreFromSegments(MemorySegment a, float aOffset, MemorySegment b, float bOffset) { + int squareDistance = Similarities.squareDistance7u(a, b, dims); + float adjustedDistance = squareDistance * scoreCorrectionConstant; + return 1 / (1f + adjustedDistance); + } + + @Override + public EuclideanSupplier copy() { + return new EuclideanSupplier(input.clone(), values, scoreCorrectionConstant); + } + } + + // This will be removed when we upgrade to 9.11, see https://github.com/apache/lucene/pull/13356 + static final class DelegateDotScorer implements ScalarQuantizedVectorSimilarity { + final ScalarQuantizedVectorSimilarity delegate; + + DelegateDotScorer(float scoreCorrectionConstant) { + delegate = fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant); + } + + @Override + public float score(byte[] queryVector, float queryVectorOffset, byte[] storedVector, float vectorOffset) { + return Math.max(delegate.score(queryVector, queryVectorOffset, storedVector, vectorOffset), 0f); + } + } + + public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { + + public DotProductSupplier(IndexInput input, RandomAccessQuantizedByteVectorValues values, float 
scoreCorrectionConstant) { + super(input, values, scoreCorrectionConstant, new DelegateDotScorer(scoreCorrectionConstant)); + } + + @Override + float scoreFromSegments(MemorySegment a, float aOffset, MemorySegment b, float bOffset) { + int dotProduct = Similarities.dotProduct7u(a, b, dims); + assert dotProduct >= 0; + float adjustedDistance = dotProduct * scoreCorrectionConstant + aOffset + bOffset; + return Math.max((1 + adjustedDistance) / 2, 0f); + } + + @Override + public DotProductSupplier copy() { + return new DotProductSupplier(input.clone(), values, scoreCorrectionConstant); + } + } + + public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier { + + public MaxInnerProductSupplier(IndexInput input, RandomAccessQuantizedByteVectorValues values, float scoreCorrectionConstant) { + super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant)); + } + + @Override + float scoreFromSegments(MemorySegment a, float aOffset, MemorySegment b, float bOffset) { + int dotProduct = Similarities.dotProduct7u(a, b, dims); + assert dotProduct >= 0; + float adjustedDistance = dotProduct * scoreCorrectionConstant + aOffset + bOffset; + if (adjustedDistance < 0) { + return 1 / (1 + -1 * adjustedDistance); + } + return adjustedDistance + 1; + } + + @Override + public MaxInnerProductSupplier copy() { + return new MaxInnerProductSupplier(input.clone(), values, scoreCorrectionConstant); + } + } + + static boolean checkIndex(long index, long length) { + return index >= 0 && index < length; + } +} diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Similarities.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Similarities.java new file mode 100644 index 0000000000000..d0333931ce22c --- /dev/null +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Similarities.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.vec.internal; + +import org.elasticsearch.nativeaccess.NativeAccess; +import org.elasticsearch.nativeaccess.VectorSimilarityFunctions; + +import java.lang.foreign.MemorySegment; +import java.lang.invoke.MethodHandle; + +public class Similarities { + + static final VectorSimilarityFunctions DISTANCE_FUNCS = NativeAccess.instance() + .getVectorSimilarityFunctions() + .orElseThrow(AssertionError::new); + + static final MethodHandle DOT_PRODUCT_7U = DISTANCE_FUNCS.dotProductHandle7u(); + static final MethodHandle SQUARE_DISTANCE_7U = DISTANCE_FUNCS.squareDistanceHandle7u(); + + static int dotProduct7u(MemorySegment a, MemorySegment b, int length) { + assert assertSegments(a, b, length); + try { + return (int) DOT_PRODUCT_7U.invokeExact(a, b, length); + } catch (Throwable e) { + if (e instanceof Error err) { + throw err; + } else if (e instanceof RuntimeException re) { + throw re; + } else { + throw new RuntimeException(e); + } + } + } + + static int squareDistance7u(MemorySegment a, MemorySegment b, int length) { + assert assertSegments(a, b, length); + try { + return (int) SQUARE_DISTANCE_7U.invokeExact(a, b, length); + } catch (Throwable e) { + if (e instanceof Error err) { + throw err; + } else if (e instanceof RuntimeException re) { + throw re; + } else { + throw new RuntimeException(e); + } + } + } + + static boolean assertSegments(MemorySegment a, MemorySegment b, int length) { + return a.isNative() && a.byteSize() >= length && b.isNative() && b.byteSize() >= length; + } +} diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 
8c010295764d5..987ec6b494f0c 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -10,12 +10,15 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; +import org.apache.lucene.codecs.lucene99.OffHeapQuantizedByteVectorValues; +import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import java.io.IOException; import java.util.Arrays; @@ -65,12 +68,12 @@ void testSimpleImpl(long maxChunkSize) throws IOException { assumeTrue(notSupportedMsg(), supported()); var factory = AbstractVectorTestCase.factory.get(); - try (Directory dir = new MMapDirectory(createTempDir(getTestName()), maxChunkSize)) { + try (Directory dir = new MMapDirectory(createTempDir("testSimpleImpl"), maxChunkSize)) { for (int dims : List.of(31, 32, 33)) { // dimensions that cross the scalar / native boundary (stride) byte[] vec1 = new byte[dims]; byte[] vec2 = new byte[dims]; - String fileName = getTestName() + "-" + dims; + String fileName = "testSimpleImpl" + "-" + dims; try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { for (int i = 0; i < dims; i++) { vec1[i] = (byte) i; @@ -81,26 +84,12 @@ void testSimpleImpl(long maxChunkSize) throws IOException { out.writeBytes(bytes, 0, bytes.length); } try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { - // dot product - float expected = luceneScore(DOT_PRODUCT, vec1, vec2, 1, 1, 1); - var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, 2, 1, DOT_PRODUCT, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - 
assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); - // max inner product - expected = luceneScore(MAXIMUM_INNER_PRODUCT, vec1, vec2, 1, 1, 1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, 2, 1, MAXIMUM_INNER_PRODUCT, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); - // cosine - expected = luceneScore(COSINE, vec1, vec2, 1, 1, 1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, 2, 1, COSINE, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); - // euclidean - expected = luceneScore(EUCLIDEAN, vec1, vec2, 1, 1, 1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, 2, 1, EUCLIDEAN, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + var values = vectorValues(dims, 2, in, VectorSimilarityType.of(sim)); + float expected = luceneScore(sim, vec1, vec2, 1, 1, 1); + var supplier = factory.getInt7ScalarQuantizedVectorScorer(sim, in, values, 1).get(); + assertThat(supplier.scorer(0).score(1), equalTo(expected)); + } } } } @@ -110,43 +99,40 @@ public void testNonNegativeDotProduct() throws IOException { assumeTrue(notSupportedMsg(), supported()); var factory = AbstractVectorTestCase.factory.get(); - try (Directory dir = new MMapDirectory(createTempDir(getTestName()), MMapDirectory.DEFAULT_MAX_CHUNK_SIZE)) { + try (Directory dir = new MMapDirectory(createTempDir("testNonNegativeDotProduct"), MMapDirectory.DEFAULT_MAX_CHUNK_SIZE)) { // keep vecs `0` so dot product is `0` byte[] vec1 = new byte[32]; byte[] vec2 = new byte[32]; - String fileName = getTestName() + "-32"; + String fileName = 
"testNonNegativeDotProduct-32"; try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { var negativeOffset = floatToByteArray(-5f); byte[] bytes = concat(vec1, negativeOffset, vec2, negativeOffset); out.writeBytes(bytes, 0, bytes.length); } try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + var values = vectorValues(32, 2, in, VectorSimilarityType.of(DOT_PRODUCT)); // dot product float expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(DOT_PRODUCT, vec1, vec2, // 1, -5, -5); - var scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, DOT_PRODUCT, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + var supplier = factory.getInt7ScalarQuantizedVectorScorer(DOT_PRODUCT, in, values, 1).get(); + assertThat(supplier.scorer(0).score(1), equalTo(expected)); + assertThat(supplier.scorer(0).score(1), greaterThanOrEqualTo(0f)); // max inner product expected = luceneScore(MAXIMUM_INNER_PRODUCT, vec1, vec2, 1, -5, -5); - scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, MAXIMUM_INNER_PRODUCT, in).get(); - assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + supplier = factory.getInt7ScalarQuantizedVectorScorer(MAXIMUM_INNER_PRODUCT, in, values, 1).get(); + assertThat(supplier.scorer(0).score(1), greaterThanOrEqualTo(0f)); + assertThat(supplier.scorer(0).score(1), equalTo(expected)); // cosine expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(COSINE, vec1, vec2, 1, -5, // -5); - scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, COSINE, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - 
assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + supplier = factory.getInt7ScalarQuantizedVectorScorer(COSINE, in, values, 1).get(); + assertThat(supplier.scorer(0).score(1), equalTo(expected)); + assertThat(supplier.scorer(0).score(1), greaterThanOrEqualTo(0f)); // euclidean expected = luceneScore(EUCLIDEAN, vec1, vec2, 1, -5, -5); - scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, EUCLIDEAN, in).get(); - assertThat(scorer.score(0, 1), equalTo(expected)); - assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + supplier = factory.getInt7ScalarQuantizedVectorScorer(EUCLIDEAN, in, values, 1).get(); + assertThat(supplier.scorer(0).score(1), equalTo(expected)); + assertThat(supplier.scorer(0).score(1), greaterThanOrEqualTo(0f)); } } } @@ -176,49 +162,35 @@ public void testRandomMin() throws IOException { void testRandom(long maxChunkSize, Function byteArraySupplier) throws IOException { var factory = AbstractVectorTestCase.factory.get(); - try (Directory dir = new MMapDirectory(createTempDir(getTestName()), maxChunkSize)) { - for (int times = 0; times < TIMES; times++) { - final int dims = randomIntBetween(1, 4096); - final int size = randomIntBetween(2, 100); - final float correction = randomFloat(); - final byte[][] vectors = new byte[size][]; - final float[] offsets = new float[size]; + try (Directory dir = new MMapDirectory(createTempDir("testRandom"), maxChunkSize)) { + final int dims = randomIntBetween(1, 4096); + final int size = randomIntBetween(2, 100); + final float correction = randomFloat(); + final byte[][] vectors = new byte[size][]; + final float[] offsets = new float[size]; - String fileName = getTestName() + "-" + times + "-" + dims; - logger.info("Testing " + fileName); - try (IndexOutput out = dir.createOutput(fileName, 
IOContext.DEFAULT)) { - for (int i = 0; i < size; i++) { - var vec = byteArraySupplier.apply(dims); - var off = randomFloat(); - out.writeBytes(vec, 0, vec.length); - out.writeInt(Float.floatToIntBits(off)); - vectors[i] = vec; - offsets[i] = off; - } + String fileName = "testRandom-" + dims; + logger.info("Testing " + fileName); + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + for (int i = 0; i < size; i++) { + var vec = byteArraySupplier.apply(dims); + var off = randomFloat(); + out.writeBytes(vec, 0, vec.length); + out.writeInt(Float.floatToIntBits(off)); + vectors[i] = vec; + offsets[i] = off; } - try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + for (int times = 0; times < TIMES; times++) { int idx0 = randomIntBetween(0, size - 1); int idx1 = randomIntBetween(0, size - 1); // may be the same as idx0 - which is ok. - // dot product - float expected = luceneScore(DOT_PRODUCT, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // max inner product - expected = luceneScore(MAXIMUM_INNER_PRODUCT, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // cosine - expected = luceneScore(COSINE, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); - 
assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // euclidean - expected = luceneScore(EUCLIDEAN, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + var values = vectorValues(dims, size, in, VectorSimilarityType.of(sim)); + float expected = luceneScore(sim, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); + var supplier = factory.getInt7ScalarQuantizedVectorScorer(sim, in, values, correction).get(); + assertThat(supplier.scorer(idx0).score(idx1), equalTo(expected)); + } } } } @@ -233,14 +205,14 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi throws IOException { var factory = AbstractVectorTestCase.factory.get(); - try (Directory dir = new MMapDirectory(createTempDir(getTestName()), maxChunkSize)) { + try (Directory dir = new MMapDirectory(createTempDir("testRandomSliceImpl"), maxChunkSize)) { for (int times = 0; times < TIMES; times++) { final int size = randomIntBetween(2, 100); final float correction = randomFloat(); final byte[][] vectors = new byte[size][]; final float[] offsets = new float[size]; - String fileName = getTestName() + "-" + times + "-" + dims; + String fileName = "testRandomSliceImpl-" + times + "-" + dims; logger.info("Testing " + fileName); try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { byte[] ba = new byte[initialPadding]; @@ -258,28 +230,16 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi var outter = dir.openInput(fileName, IOContext.DEFAULT); var in = 
outter.slice("slice", initialPadding, outter.length() - initialPadding) ) { - int idx0 = randomIntBetween(0, size - 1); - int idx1 = randomIntBetween(0, size - 1); // may be the same as idx0 - which is ok. - // dot product - float expected = luceneScore(DOT_PRODUCT, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // max inner product - expected = luceneScore(MAXIMUM_INNER_PRODUCT, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // cosine - expected = luceneScore(COSINE, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // euclidean - expected = luceneScore(EUCLIDEAN, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + for (int itrs = 0; itrs < TIMES / 10; itrs++) { + int idx0 = randomIntBetween(0, size - 1); + int idx1 = randomIntBetween(0, size - 1); // may be the same as idx0 - which is ok. 
+ for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + var values = vectorValues(dims, size, in, VectorSimilarityType.of(sim)); + float expected = luceneScore(sim, vectors[idx0], vectors[idx1], correction, offsets[idx0], offsets[idx1]); + var supplier = factory.getInt7ScalarQuantizedVectorScorer(sim, in, values, correction).get(); + assertThat(supplier.scorer(idx0).score(idx1), equalTo(expected)); + } + } } } } @@ -290,12 +250,12 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi public void testLarge() throws IOException { var factory = AbstractVectorTestCase.factory.get(); - try (Directory dir = new MMapDirectory(createTempDir(getTestName()))) { + try (Directory dir = new MMapDirectory(createTempDir("testLarge"))) { final int dims = 8192; final int size = 262144; final float correction = randomFloat(); - String fileName = getTestName() + "-" + dims; + String fileName = "testLarge-" + dims; logger.info("Testing " + fileName); try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { for (int i = 0; i < size; i++) { @@ -311,26 +271,12 @@ public void testLarge() throws IOException { int idx1 = size - 1; float off0 = (float) idx0; float off1 = (float) idx1; - // dot product - float expected = luceneScore(DOT_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); - var scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // max inner product - expected = luceneScore(MAXIMUM_INNER_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new 
VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // cosine - expected = luceneScore(COSINE, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); - // euclidean - expected = luceneScore(EUCLIDEAN, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); - scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); - assertThat(scorer.score(idx0, idx1), equalTo(expected)); - assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + for (var sim : List.of(COSINE, DOT_PRODUCT, EUCLIDEAN, MAXIMUM_INNER_PRODUCT)) { + var values = vectorValues(dims, size, in, VectorSimilarityType.of(sim)); + float expected = luceneScore(sim, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + var supplier = factory.getInt7ScalarQuantizedVectorScorer(sim, in, values, correction).get(); + assertThat(supplier.scorer(idx0).score(idx1), equalTo(expected)); + } } } } @@ -355,7 +301,7 @@ void testRaceImpl(VectorSimilarityType sim) throws Exception { IntStream.range(0, dims).forEach(i -> vec1[i] = 1); IntStream.range(0, dims).forEach(i -> vec2[i] = 2); try (Directory dir = new MMapDirectory(createTempDir("testRace"), maxChunkSize)) { - String fileName = getTestName() + "-" + dims; + String fileName = "testRace-" + dims; try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { var one = floatToByteArray(1f); byte[] bytes = concat(vec1, one, vec1, one, vec2, one, vec2, one); @@ -365,11 +311,11 @@ void testRaceImpl(VectorSimilarityType sim) throws Exception { var expectedScore2 = luceneScore(sim, vec2, vec2, 1, 1, 1); try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { 
- var scoreSupplier = factory.getInt7ScalarQuantizedVectorScorer(dims, 4, 1, sim, in).get(); - var scorer = new VectorScorerSupplierAdapter(scoreSupplier); + var values = vectorValues(dims, 4, in, VectorSimilarityType.of(sim)); + var scoreSupplier = factory.getInt7ScalarQuantizedVectorScorer(sim, in, values, 1f).get(); var tasks = List.>>of( - new ScoreCallable(scorer.copy().scorer(0), 1, expectedScore1), - new ScoreCallable(scorer.copy().scorer(2), 3, expectedScore2) + new ScoreCallable(scoreSupplier.copy().scorer(0), 1, expectedScore1), + new ScoreCallable(scoreSupplier.copy().scorer(2), 3, expectedScore2) ); var executor = Executors.newFixedThreadPool(2); var results = executor.invokeAll(tasks); @@ -408,6 +354,10 @@ public Optional call() throws Exception { } } + RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, in.slice("values", 0, in.length())); + } + // creates the vector based on the given ordinal, which is reproducible given the ord and dims static byte[] vector(int ord, int dims) { var random = new Random(Objects.hash(ord, dims)); diff --git a/libs/vec/src/test21/java/org/elasticsearch/vec/internal/IndexInputUtilsTests.java b/libs/vec/src/test21/java/org/elasticsearch/vec/internal/IndexInputUtilsTests.java index 874ccff50709a..0335022b807db 100644 --- a/libs/vec/src/test21/java/org/elasticsearch/vec/internal/IndexInputUtilsTests.java +++ b/libs/vec/src/test21/java/org/elasticsearch/vec/internal/IndexInputUtilsTests.java @@ -28,7 +28,7 @@ public class IndexInputUtilsTests extends ESTestCase { public void testSingleSegment() throws IOException { try (Directory dir = new MMapDirectory(createTempDir(getTestName()))) { for (int times = 0; times < TIMES; times++) { - String fileName = getTestName() + times; + String fileName = "testSingleSegment" + times; int size = randomIntBetween(10, 127); 
try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { byte[] ba = new byte[size]; @@ -80,7 +80,7 @@ public void testSingleSegment() throws IOException { public void testMultiSegment() throws IOException { try (Directory dir = new MMapDirectory(createTempDir(getTestName()), 32L)) { for (int times = 0; times < TIMES; times++) { - String fileName = getTestName() + times; + String fileName = "testMultiSegment" + times; int size = randomIntBetween(65, 1511); int expectedNumSegs = size / 32 + 1; try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index d52dd02fb99f4..a0375c61d7c29 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -14,7 +14,7 @@ esplugin { restResources { restApi { include 'bulk', 'count', 'search', '_common', 'indices', 'index', 'cluster', 'rank_eval', 'reindex', 'update_by_query', 'delete_by_query', - 'eql', 'data_stream', 'ingest', 'cat' + 'eql', 'data_stream', 'ingest', 'cat', 'capabilities' } } @@ -39,7 +39,7 @@ if (BuildParams.inFipsJvm){ } if (BuildParams.isSnapshotBuild() == false) { - tasks.named("internalClusterTest").configure { + tasks.withType(Test).configureEach { systemProperty 'es.failure_store_feature_flag_enabled', 'true' } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index b3cf1e7a9564a..aabe865f9fe1d 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -95,6 +95,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static 
org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.STARTED; +import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.SUCCESS; import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.UNKNOWN; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_DOWNSAMPLE_STATUS; import static org.elasticsearch.datastreams.DataStreamsPlugin.LIFECYCLE_CUSTOM_INDEX_METADATA_KEY; @@ -871,7 +872,7 @@ private Index maybeExecuteRollover(ClusterState state, DataStream dataStream, bo * @param indicesToExcludeForRemainingRun Indices to exclude from retention even if it would be time for them to be deleted * @return The set of indices that delete requests have been sent for */ - private Set maybeExecuteRetention(ClusterState state, DataStream dataStream, Set indicesToExcludeForRemainingRun) { + Set maybeExecuteRetention(ClusterState state, DataStream dataStream, Set indicesToExcludeForRemainingRun) { Metadata metadata = state.metadata(); DataStreamGlobalRetention globalRetention = dataStream.isSystem() ? null : globalRetentionResolver.resolve(state); List backingIndicesOlderThanRetention = dataStream.getIndicesPastRetention(metadata::index, nowSupplier, globalRetention); @@ -890,14 +891,7 @@ private Set maybeExecuteRetention(ClusterState state, DataStream dataStre IndexMetadata.DownsampleTaskStatus downsampleStatus = INDEX_DOWNSAMPLE_STATUS.get(backingIndex.getSettings()); // we don't want to delete the source index if they have an in-progress downsampling operation because the // target downsample index will remain in the system as a standalone index - if (downsampleStatus.equals(UNKNOWN)) { - indicesToBeRemoved.add(index); - - // there's an opportunity here to batch the delete requests (i.e. 
delete 100 indices / request) - // let's start simple and reevaluate - String indexName = backingIndex.getIndex().getName(); - deleteIndexOnce(indexName, "the lapsed [" + effectiveDataRetention + "] retention period"); - } else { + if (downsampleStatus == STARTED) { // there's an opportunity here to cancel downsampling and delete the source index now logger.trace( "Data stream lifecycle skips deleting index [{}] even though its retention period [{}] has lapsed " @@ -907,6 +901,15 @@ private Set maybeExecuteRetention(ClusterState state, DataStream dataStre effectiveDataRetention, downsampleStatus ); + } else { + // UNKNOWN is the default value, and has no real use. So index should be deleted + // SUCCESS meaning downsampling completed successfully and there is nothing in progress, so we can also delete + indicesToBeRemoved.add(index); + + // there's an opportunity here to batch the delete requests (i.e. delete 100 indices / request) + // let's start simple and reevaluate + String indexName = backingIndex.getIndex().getName(); + deleteIndexOnce(indexName, "the lapsed [" + effectiveDataRetention + "] retention period"); } } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorService.java index 0628bed0f9019..90154c1190421 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorService.java @@ -63,7 +63,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources DataStreamLifecycleHealthInfo dataStreamLifecycleHealthInfo = healthInfo.dslHealthInfo(); if (dataStreamLifecycleHealthInfo == null) { // DSL reports health information on 
every run, so data will eventually arrive to the health node. In the meantime, let's - // report UNKNOWN health + // report GREEN health, as there are no errors to report before the first run anyway. return createIndicator( HealthStatus.GREEN, "No data stream lifecycle health data available yet. Health information will be reported after the first run.", @@ -93,12 +93,14 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources + " repeatedly encountered errors whilst trying to advance in its lifecycle", createDetails(verbose, dataStreamLifecycleHealthInfo), STAGNATING_INDEX_IMPACT, - List.of( - new Diagnosis( - STAGNATING_BACKING_INDICES_DIAGNOSIS_DEF, - List.of(new Diagnosis.Resource(Diagnosis.Resource.Type.INDEX, affectedIndices)) + verbose + ? List.of( + new Diagnosis( + STAGNATING_BACKING_INDICES_DIAGNOSIS_DEF, + List.of(new Diagnosis.Resource(Diagnosis.Resource.Type.INDEX, affectedIndices)) + ) ) - ) + : List.of() ); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 5bf4f958c1130..c965eb2ba2536 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -118,8 +118,10 @@ import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.TARGET_MERGE_FACTOR_VALUE; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -353,7 +355,7 @@ public void testRetentionSkippedWhilstDownsamplingInProgress() { .put(indexMetadata.getSettings()) .put( IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY, - randomValueOtherThan(UNKNOWN, () -> randomFrom(IndexMetadata.DownsampleTaskStatus.values())) + STARTED // See TransportDownsampleAction#createDownsampleIndex(...) ) ); indexMetaBuilder.putCustom( @@ -1516,6 +1518,76 @@ public void testFailureStoreIsManagedEvenWhenDisabled() { ); } + public void testMaybeExecuteRetentionSuccessfulDownsampledIndex() { + String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + ClusterState state = downsampleSetup(dataStreamName, SUCCESS); + DataStream dataStream = state.metadata().dataStreams().get(dataStreamName); + String firstGenIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + + // Executing the method to be tested: + Set indicesToBeRemoved = dataStreamLifecycleService.maybeExecuteRetention(clusterService.state(), dataStream, Set.of()); + assertThat(indicesToBeRemoved, contains(state.getMetadata().index(firstGenIndexName).getIndex())); + } + + public void testMaybeExecuteRetentionDownsampledIndexInProgress() { + String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + ClusterState state = downsampleSetup(dataStreamName, STARTED); + DataStream dataStream = state.metadata().dataStreams().get(dataStreamName); + String firstGenIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + + // Executing the method to be tested: + Set indicesToBeRemoved = dataStreamLifecycleService.maybeExecuteRetention(clusterService.state(), dataStream, Set.of()); + assertThat(indicesToBeRemoved, empty()); + } + + public void testMaybeExecuteRetentionDownsampledUnknown() { + String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + ClusterState state = 
downsampleSetup(dataStreamName, UNKNOWN); + DataStream dataStream = state.metadata().dataStreams().get(dataStreamName); + String firstGenIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + + // Executing the method to be tested: + Set indicesToBeRemoved = dataStreamLifecycleService.maybeExecuteRetention(clusterService.state(), dataStream, Set.of()); + assertThat(indicesToBeRemoved, contains(state.getMetadata().index(firstGenIndexName).getIndex())); + } + + private ClusterState downsampleSetup(String dataStreamName, IndexMetadata.DownsampleTaskStatus status) { + // Base setup: + Metadata.Builder builder = Metadata.builder(); + DataStream dataStream = createDataStream( + builder, + dataStreamName, + 2, + settings(IndexVersion.current()).put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) + .put("index.routing_path", "@timestamp"), + DataStreamLifecycle.newBuilder() + .downsampling( + new Downsampling( + List.of(new Round(TimeValue.timeValueMillis(0), new DownsampleConfig(new DateHistogramInterval("5m")))) + ) + ) + .dataRetention(TimeValue.timeValueMillis(1)) + .build(), + now + ); + builder.put(dataStream); + + // Update the first backing index so that is appears to have been downsampled: + String firstGenIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + var imd = builder.get(firstGenIndexName); + var imdBuilder = new IndexMetadata.Builder(imd); + imdBuilder.settings(Settings.builder().put(imd.getSettings()).put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), status).build()); + builder.put(imdBuilder); + + // Attaching state: + String nodeId = "localNode"; + DiscoveryNodes.Builder nodesBuilder = buildNodes(nodeId); + nodesBuilder.masterNodeId(nodeId); + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(builder).nodes(nodesBuilder).build(); + setState(clusterService, state); + return state; + } + /* * Creates a test cluster state with the given indexName. 
If customDataStreamLifecycleMetadata is not null, it is added as the value * of the index's custom metadata named "data_stream_lifecycle". diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java index 4461e2ffb7f02..79596cfced99a 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java @@ -99,6 +99,25 @@ public void testYellowWhenStagnatingIndicesPresent() { assertThat(diagnosis.affectedResources().get(0).getValues(), containsInAnyOrder(secondGenerationIndex, firstGenerationIndex)); } + public void testSkippingFieldsWhenVerboseIsFalse() { + String secondGenerationIndex = DataStream.getDefaultBackingIndexName("foo", 2L); + String firstGenerationIndex = DataStream.getDefaultBackingIndexName("foo", 1L); + HealthIndicatorResult result = service.calculate( + false, + constructHealthInfo( + new DataStreamLifecycleHealthInfo( + List.of(new DslErrorInfo(secondGenerationIndex, 1L, 200), new DslErrorInfo(firstGenerationIndex, 3L, 100)), + 15 + ) + ) + ); + assertThat(result.status(), is(HealthStatus.YELLOW)); + assertThat(result.symptom(), is("2 backing indices have repeatedly encountered errors whilst trying to advance in its lifecycle")); + assertThat(result.details(), is(HealthIndicatorDetails.EMPTY)); + assertThat(result.impacts(), is(STAGNATING_INDEX_IMPACT)); + assertThat(result.diagnosisList().isEmpty(), is(true)); + } + private HealthInfo constructHealthInfo(DataStreamLifecycleHealthInfo dslHealthInfo) { return new HealthInfo(Map.of(), dslHealthInfo, Map.of()); } diff --git 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 91d23afa67af9..11889a3153a98 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -3,7 +3,7 @@ setup: - requires: cluster_features: ["gte_v8.15.0"] reason: "data stream failure stores REST structure changed in 8.15+" - test_runner_features: allowed_warnings + test_runner_features: [allowed_warnings, contains, capabilities] - do: allowed_warnings: @@ -27,13 +27,24 @@ setup: name: data-stream-for-rollover --- +teardown: + - do: + indices.delete_data_stream: + name: data-stream-for-lazy-rollover + ignore: 404 + + - do: + ingest.delete_pipeline: + id: failing_pipeline + ignore: 404 +--- "Roll over a data stream's failure store without conditions": - # rollover data stream to create new backing index - do: indices.rollover: alias: "data-stream-for-rollover" target_failure_store: true + - match: { acknowledged: true } - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } - match: { rolled_over: true } @@ -54,7 +65,6 @@ setup: --- "Roll over a data stream's failure store with conditions": - # index first document and wait for refresh - do: index: index: data-stream-for-rollover @@ -63,7 +73,6 @@ setup: '@timestamp': '2020-12-12' count: 'invalid value' - # rollover data stream to create new backing index - do: indices.rollover: alias: "data-stream-for-rollover" @@ -72,6 +81,7 @@ setup: conditions: max_docs: 1 + - match: { acknowledged: true } - match: { old_index: 
"/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } - match: { rolled_over: true } @@ -92,7 +102,6 @@ setup: --- "Don't roll over a data stream's failure store when conditions aren't met": - # rollover data stream to create new backing index - do: indices.rollover: alias: "data-stream-for-rollover" @@ -101,14 +110,219 @@ setup: conditions: max_docs: 1 + - match: { acknowledged: false } + - match: { rolled_over: false } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + +--- +"Lazily roll over a data stream's failure store after a shard failure": + - requires: + reason: "data stream failure store lazy rollover only supported in 8.15+" + test_runner_features: [allowed_warnings, capabilities] + capabilities: + - method: POST + path: /{index}/_rollover + capabilities: [lazy-rollover-failure-store] + + # Mark the failure store for lazy rollover + - do: + indices.rollover: + alias: "data-stream-for-rollover" + target_failure_store: true + lazy: true + + - match: { acknowledged: true } + - match: { rolled_over: false } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { 
data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + index: + index: data-stream-for-rollover + refresh: true + body: + '@timestamp': '2020-12-12' + count: 'invalid value' + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. + - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + + - do: + search: + index: .fs-data-stream-for-rollover-* + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } + - exists: hits.hits.0._source.@timestamp + - not_exists: hits.hits.0._source.count + - match: { hits.hits.0._source.document.index: 'data-stream-for-rollover' } + - match: { hits.hits.0._source.document.source.@timestamp: '2020-12-12' } + - match: { hits.hits.0._source.document.source.count: 'invalid value' } + - match: { hits.hits.0._source.error.type: 'document_parsing_exception' } + +--- +"Lazily roll over a data stream's failure store after an ingest failure": + - requires: + reason: "data stream failure store lazy rollover only supported in 8.15+" + test_runner_features: [allowed_warnings, capabilities] + 
capabilities: + - method: POST + path: /{index}/_rollover + capabilities: [lazy-rollover-failure-store] + + - do: + ingest.put_pipeline: + id: "failing_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "fail" : { + "message" : "error_message" + } + } + ] + } + - match: { acknowledged: true } + + - do: + allowed_warnings: + - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [data-*] + data_stream: + failure_store: true + template: + settings: + index: + default_pipeline: "failing_pipeline" + + - do: + indices.create_data_stream: + name: data-stream-for-lazy-rollover + + # Mark the failure store for lazy rollover + - do: + indices.rollover: + alias: data-stream-for-lazy-rollover + target_failure_store: true + lazy: true + + - match: { acknowledged: true } - match: { rolled_over: false } - match: { dry_run: false } + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-lazy-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-lazy-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-lazy-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + index: + index: data-stream-for-lazy-rollover + refresh: true + body: + '@timestamp': '2020-12-12' + count: 1 + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-lazy-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' 
} + # Both backing and failure indices use the same generation field. + - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-lazy-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-lazy-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-lazy-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + +--- +"A failure store marked for lazy rollover should only be rolled over when there is a failure": + - requires: + reason: "data stream failure store lazy rollover only supported in 8.15+" + test_runner_features: [allowed_warnings, capabilities] + capabilities: + - method: POST + path: /{index}/_rollover + capabilities: [lazy-rollover-failure-store] + + # Mark the failure store for lazy rollover + - do: + indices.rollover: + alias: "data-stream-for-rollover" + target_failure_store: true + lazy: true + + - match: { acknowledged: true } + - match: { rolled_over: false } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + index: + index: data-stream-for-rollover + refresh: true + body: + '@timestamp': '2020-12-12' + count: 3 + - do: indices.get_data_stream: name: "*" - match: { data_streams.0.name: data-stream-for-rollover } - 
match: { data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. - match: { data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java index 46ff50063f969..067b341d6394c 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapperTests.java @@ -827,12 +827,25 @@ protected boolean supportsIgnoreMalformed() { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean syntheticSource) { return new SyntheticSourceSupport() { + @Override + public boolean preservesExactSource() { + return true; + } + public SyntheticSourceExample example(int maxValues) { - String value = rarely() + if (randomBoolean()) { + var value = generateValue(); + return new SyntheticSourceExample(value, value, this::mapping); + } + + var array = randomList(1, 5, this::generateValue); + return new SyntheticSourceExample(array, array, this::mapping); + } + + private Object generateValue() { + return rarely() ? 
null : randomList(0, 10, () -> randomAlphaOfLengthBetween(0, 10)).stream().collect(Collectors.joining(" ")); - - return new SyntheticSourceExample(value, value, this::mapping); } private void mapping(XContentBuilder b) throws IOException { diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/30_synthetic_source.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/30_synthetic_source.yml index 1f2daf5ec37f5..75397bd9e0fe9 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/30_synthetic_source.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/30_synthetic_source.yml @@ -30,6 +30,13 @@ setup: body: a_field: null + - do: + index: + index: test + id: "3" + body: + a_field: ["quick brown", "fox", "jumps"] + - do: indices.refresh: {} @@ -46,3 +53,10 @@ setup: id: "2" - match: { _source.a_field: null } + + - do: + get: + index: test + id: "3" + + - match: { _source.a_field: ["quick brown", "fox", "jumps"] } diff --git a/muted-tests.yml b/muted-tests.yml index 628d57d250768..4f64492466375 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -23,6 +23,18 @@ tests: issue: "https://github.com/elastic/elasticsearch/issues/108857" method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ \ dependent mappings / dates}" +- class: "org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT" + issue: "https://github.com/elastic/elasticsearch/issues/108950" + method: "test {p0=health/10_basic/cluster health basic test}" +- class: "org.elasticsearch.upgrades.SearchStatesIT" + issue: "https://github.com/elastic/elasticsearch/issues/108991" + method: "testCanMatch" +- class: "org.elasticsearch.upgrades.AggregationsIT" + issue: "https://github.com/elastic/elasticsearch/issues/108992" + method: "testHistogram" +- class: "org.elasticsearch.upgrades.MlTrainedModelsUpgradeIT" + 
issue: "https://github.com/elastic/elasticsearch/issues/108993" + method: "testTrainedModelInference" # Examples: # # Mute a single test case in a YAML test suite: diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/BootstrapCheckTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/BootstrapCheckTests.java new file mode 100644 index 0000000000000..28f9fdb577dc9 --- /dev/null +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/BootstrapCheckTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.packaging.test; + +import org.elasticsearch.packaging.util.ServerUtils; +import org.elasticsearch.packaging.util.docker.DockerRun; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; + +import static org.elasticsearch.packaging.util.docker.Docker.runContainer; +import static org.elasticsearch.packaging.util.docker.DockerRun.builder; + +public class BootstrapCheckTests extends PackagingTestCase { + + public void test10Install() throws Exception { + install(); + } + + public void test20RunWithBootstrapChecks() throws Exception { + configureBootstrapChecksAndRun( + Map.of( + "xpack.security.enabled", + "false", + "xpack.security.http.ssl.enabled", + "false", + "xpack.security.enrollment.enabled", + "false", + "discovery.type", + "single-node" + ) + ); + stopElasticsearch(); + } + + private void configureBootstrapChecksAndRun(Map settings) throws Exception { + if (distribution().isDocker()) { + DockerRun builder = builder().envVar("ES_JAVA_OPTS", "-Des.enforce.bootstrap.checks=true"); + settings.forEach(builder::envVar); + 
runContainer(distribution(), builder); + } else { + Path jvmOptionsDir = installation.config.resolve("jvm.options.d"); + Path enableBootstrap = jvmOptionsDir.resolve("enable_bootstrap.options"); + Files.writeString(enableBootstrap, "-Des.enforce.bootstrap.checks=true"); + + for (var setting : settings.entrySet()) { + ServerUtils.addSettingToExistingConfiguration(installation.config, setting.getKey(), setting.getValue()); + } + ServerUtils.removeSettingFromExistingConfiguration(installation.config, "cluster.initial_master_nodes"); + } + + startElasticsearch(); + } +} diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java index 8b59483a2c7f0..e6ed9e3f79e9e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -110,6 +110,13 @@ public static void waitForElasticsearch(Installation installation) throws Except */ private static HttpResponse execute(Request request, String username, String password, Path caCert) throws Exception { final Executor executor; + logger.info( + "Executing request [{}] with username/password [{}/{}] and caCert [{}]", + request.toString(), + username, + password, + caCert + ); if (caCert != null) { try (InputStream inStream = Files.newInputStream(caCert)) { CertificateFactory cf = CertificateFactory.getInstance("X.509"); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index feb95b5eb2d93..41c61edde02be 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -40,7 +40,6 @@ public class DockerRun { private DockerRun() {} public static DockerRun builder() { - // 
Disable this setting by default in the Docker tests return new DockerRun().envVar("ingest.geoip.downloader.enabled", "false"); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json index 1620b955b8433..b29bf5304f782 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/shutdown.get_node.json @@ -29,6 +29,11 @@ } ] }, - "params":{} + "params": { + "master_timeout": { + "type": "time", + "description": "Timeout for processing on master node" + } + } } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml index f217834e62a5b..2e9d70c501b47 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -92,7 +92,7 @@ body: order: 0 version: 1 - index_patterns: t* + index_patterns: test* settings: number_of_shards: 1 number_of_replicas: 0 @@ -103,7 +103,7 @@ body: order: 2 version: 1 - index_patterns: tea* + index_patterns: nomatch* settings: number_of_shards: 1 number_of_replicas: 0 @@ -116,7 +116,7 @@ $body: | /^ test \s+ - \[t\*\] \s+ + \[test\*\] \s+ 0 \s+ 1 \s* \n @@ -134,7 +134,7 @@ body: order: 0 version: 1 - index_patterns: t* + index_patterns: test* settings: number_of_shards: 1 number_of_replicas: 0 @@ -154,7 +154,7 @@ composed_of \n test \s+ - \[t\*\] \s+ + \[test\*\] \s+ 0 \s+ 1 \s* \n @@ -172,7 +172,7 @@ body: order: 0 version: 1 - index_patterns: t* + index_patterns: test* settings: number_of_shards: 1 number_of_replicas: 0 @@ -190,7 +190,7 @@ index_patterns \n test \s+ - \[t\*\] + \[test*\*\] \n $/ @@ -206,7 +206,7 @@ name: test body: order: 0 - index_patterns: t* + index_patterns: test* 
settings: number_of_shards: 1 number_of_replicas: 0 @@ -217,7 +217,7 @@ body: order: 0 version: 1 - index_patterns: te* + index_patterns: test-* settings: number_of_shards: 1 number_of_replicas: 0 @@ -230,8 +230,8 @@ - match: $body: | /^ - test \s+ \[t\*\] \s+ \n \n - test_1 \s+ \[te\*\] \s+ 1 \n \n + test \s+ \[test\*\] \s+ \n \n + test_1 \s+ \[test-\*\] \s+ 1 \n \n $/ - do: @@ -242,8 +242,8 @@ - match: $body: | /^ - test_1 \s+ \[te\*\] \s+ 1\n \n - test \s+ \[t\*\] \s+ \n \n + test_1 \s+ \[test-\*\] \s+ 1\n \n + test \s+ \[test\*\] \s+ \n \n $/ @@ -260,7 +260,7 @@ body: order: 0 version: 1 - index_patterns: [t*, te*] + index_patterns: [test*, test-*] settings: number_of_shards: 1 number_of_replicas: 0 @@ -278,7 +278,7 @@ index_patterns \n test_1 \s+ - \[t\*,\ te\*\] + \[test\*,\ test-\*\] \n \n $/ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index 0bf767b844e64..5d5110fb54e45 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -1,8 +1,8 @@ --- "cluster health basic test": - requires: - cluster_features: "gte_v8.7.0" - reason: "health was added in 8.2.0, master_is_stable in 8.4.0, and REST API updated in 8.7" + cluster_features: "gte_v8.13.0" + reason: "data stream lifecycle indicator was added in 8.13.0" - do: health_report: { } @@ -36,34 +36,10 @@ - exists: indicators.shards_availability.details.started_primaries - exists: indicators.shards_availability.details.unassigned_replicas ---- -"basic shards capacity health indicator test": - - requires: - cluster_features: "gte_v8.8.0" - reason: "shards capacity indicator was added in 8.8.0" - - - do: - health_report: { } - - - is_true: cluster_name - # This test might execute before the health node has received all health info, resulting in status "unknown" - - 
is_true: status - match: { indicators.shards_capacity.status: "green" } - match: { indicators.shards_capacity.symptom: "The cluster has enough room to add new shards." } - exists: indicators.shards_capacity.details.data.max_shards_in_cluster - exists: indicators.shards_capacity.details.frozen.max_shards_in_cluster ---- -"basic data stream lifecycle health indicator test": - - requires: - cluster_features: "gte_v8.13.0" - reason: "data stream lifecycle indicator was added in 8.13.0" - - - do: - health_report: { } - - - is_true: cluster_name - # This test might execute before the health node has received all health info, resulting in status "unknown" - - is_true: status - is_true: indicators.data_stream_lifecycle.status - is_true: indicators.data_stream_lifecycle.symptom diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml index 81c8cf64169e2..f62e06d43b857 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_index_template/10_basic.yml @@ -9,7 +9,7 @@ indices.put_index_template: name: test body: - index_patterns: te* + index_patterns: test* template: settings: number_of_shards: 1 @@ -39,7 +39,7 @@ indices.put_index_template: name: existing_test body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -66,7 +66,7 @@ indices.simulate_index_template: name: test body: - index_patterns: te* + index_patterns: test* priority: 15 template: settings: @@ -79,7 +79,7 @@ - match: {template.settings.index.number_of_replicas: "2"} - match: {template.mappings.properties.ct_field.type: "keyword"} - match: {overlapping.0.name: existing_test} - - match: {overlapping.0.index_patterns: ["te*"]} + - match: {overlapping.0.index_patterns: 
["test*"]} - length: {template.aliases: 1} - is_true: template.aliases.test_alias @@ -92,11 +92,11 @@ - do: allowed_warnings: - - "index template [test] has index patterns [te*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test] will take precedence during new index creation" + - "index template [test] has index patterns [test*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test] will take precedence during new index creation" indices.put_index_template: name: test body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -124,18 +124,18 @@ indices.put_template: name: v1_template body: - index_patterns: [t*, t1*] + index_patterns: [test*, t1*] settings: number_of_shards: 5 - do: allowed_warnings: - - "index template [v2_template] has index patterns [te*] matching patterns from existing older templates [v1_template] with patterns - (v1_template => [t*, t1*]); this template [v2_template] will take precedence during new index creation" + - "index template [v2_template] has index patterns [test*] matching patterns from existing older templates [v1_template] with patterns + (v1_template => [test*, t1*]); this template [v2_template] will take precedence during new index creation" indices.put_index_template: name: v2_template body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -148,12 +148,12 @@ - do: allowed_warnings: - - "index template [winning_v2_template] has index patterns [te*] matching patterns from existing older templates [v1_template] with patterns - (v1_template => [t*, t1*]); this template [winning_v2_template] will take precedence during new index creation" + - "index template [winning_v2_template] has index patterns [test*] matching patterns from existing older templates [v1_template] with patterns + (v1_template => [test*, t1*]); this template [winning_v2_template] will take 
precedence during new index creation" indices.put_index_template: name: winning_v2_template body: - index_patterns: te* + index_patterns: test* priority: 20 template: settings: @@ -172,9 +172,9 @@ - match: {template.settings.index.number_of_replicas: "0"} - match: {template.mappings.properties.field.type: "keyword"} - match: {overlapping.0.name: v1_template} - - match: {overlapping.0.index_patterns: ["t*", "t1*"]} + - match: {overlapping.0.index_patterns: ["test*", "t1*"]} - match: {overlapping.1.name: v2_template} - - match: {overlapping.1.index_patterns: ["te*"]} + - match: {overlapping.1.index_patterns: ["test*"]} --- "Simulate an index for and index or alias that already exists": @@ -187,7 +187,7 @@ indices.put_index_template: name: test body: - index_patterns: [te*] + index_patterns: [test*] template: settings: number_of_shards: 1 @@ -235,7 +235,7 @@ indices.put_index_template: name: test body: - index_patterns: te* + index_patterns: test* template: lifecycle: data_retention: "7d" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml index 73ab9c18a8ec3..b9c6432751aac 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.simulate_template/10_basic.yml @@ -39,7 +39,7 @@ indices.put_index_template: name: existing_test body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -65,7 +65,7 @@ - do: indices.simulate_template: body: - index_patterns: te* + index_patterns: test* priority: 15 template: settings: @@ -78,7 +78,7 @@ - match: {template.settings.index.number_of_replicas: "2"} - match: {template.mappings.properties.ct_field.type: "keyword"} - match: {overlapping.0.name: existing_test} - - match: {overlapping.0.index_patterns: ["te*"]} + - 
match: {overlapping.0.index_patterns: ["test*"]} - length: {template.aliases: 1} - is_true: template.aliases.test_alias @@ -93,18 +93,18 @@ indices.put_template: name: v1_template body: - index_patterns: [t*, t1*] + index_patterns: [test*, t1*] settings: number_of_shards: 5 - do: allowed_warnings: - - "index template [v2_template] has index patterns [te*] matching patterns from existing older templates [v1_template] with patterns - (v1_template => [t*, t1*]); this template [v2_template] will take precedence during new index creation" + - "index template [v2_template] has index patterns [test*] matching patterns from existing older templates [v1_template] with patterns + (v1_template => [test*, t1*]); this template [v2_template] will take precedence during new index creation" indices.put_index_template: name: v2_template body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -117,12 +117,12 @@ - do: allowed_warnings: - - "index template [winning_v2_template] has index patterns [te*] matching patterns from existing older templates [v1_template] with patterns - (v1_template => [t*, t1*]); this template [winning_v2_template] will take precedence during new index creation" + - "index template [winning_v2_template] has index patterns [test*] matching patterns from existing older templates [v1_template] with patterns + (v1_template => [test*, t1*]); this template [winning_v2_template] will take precedence during new index creation" indices.put_index_template: name: winning_v2_template body: - index_patterns: te* + index_patterns: test* priority: 20 template: settings: @@ -141,9 +141,9 @@ - match: {template.settings.index.number_of_replicas: "0"} - match: {template.mappings.properties.field.type: "keyword"} - match: {overlapping.0.name: v1_template} - - match: {overlapping.0.index_patterns: ["t*", "t1*"]} + - match: {overlapping.0.index_patterns: ["test*", "t1*"]} - match: {overlapping.1.name: v2_template} - - match: 
{overlapping.1.index_patterns: ["te*"]} + - match: {overlapping.1.index_patterns: ["test*"]} --- "Simulate replacing a template with a newer version": @@ -156,7 +156,7 @@ indices.put_index_template: name: v2_template body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: @@ -183,7 +183,7 @@ indices.simulate_template: name: v2_template body: - index_patterns: te* + index_patterns: test* priority: 10 template: settings: diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4ce390831cb22..d372f4ee023bd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -175,6 +175,9 @@ static TransportVersion def(int id) { public static final TransportVersion RULE_QUERY_RENAME = def(8_666_00_0); public static final TransportVersion SPARSE_VECTOR_QUERY_ADDED = def(8_667_00_0); public static final TransportVersion ESQL_ADD_INDEX_MODE_TO_SOURCE = def(8_668_00_0); + public static final TransportVersion GET_SHUTDOWN_STATUS_TIMEOUT = def(8_669_00_0); + public static final TransportVersion FAILURE_STORE_TELEMETRY = def(8_670_00_0); + public static final TransportVersion ADD_METADATA_FLATTENED_TO_ROLES = def(8_671_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index f08dde7c5ba94..5f56768138095 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -800,13 +800,7 @@ private static ActionFilters setupActionFilters(List actionPlugins List finalFilters = new ArrayList<>(); List mappedFilters = new ArrayList<>(); for (var plugin : actionPlugins) { - for (var filter : plugin.getActionFilters()) { - if (filter instanceof MappedActionFilter mappedFilter) { - mappedFilters.add(mappedFilter); - } else { - finalFilters.add(filter); - } - } + finalFilters.addAll(plugin.getActionFilters()); mappedFilters.addAll(plugin.getMappedActionFilters()); } if (mappedFilters.isEmpty() == false) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java index e8d63affcb8bf..61e43ef9ab27f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java @@ -99,7 +99,7 @@ protected void masterOperation( rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), rolloverRequest.getCreateIndexRequest(), - false + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); @@ -121,13 +121,9 @@ protected void masterOperation( String source = "lazy_rollover source [" + trialRolloverIndexName + "] to target [" + trialRolloverIndexName + "]"; // We create a new rollover request to ensure that it doesn't contain any other parameters apart from the data 
stream name // This will provide a more resilient user experience - RolloverTask rolloverTask = new RolloverTask( - new RolloverRequest(rolloverRequest.getRolloverTarget(), null), - null, - trialRolloverResponse, - null, - listener - ); + var newRolloverRequest = new RolloverRequest(rolloverRequest.getRolloverTarget(), null); + newRolloverRequest.setIndicesOptions(rolloverRequest.indicesOptions()); + RolloverTask rolloverTask = new RolloverTask(newRolloverRequest, null, trialRolloverResponse, null, listener); submitRolloverTask(rolloverRequest, source, rolloverTask); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 09f9411d5a834..dea772cc893f2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -153,10 +153,6 @@ public ActionRequestValidationException validate() { ); } - if (failureStoreOptions.includeFailureIndices() && lazy) { - validationException = addValidationError("lazily rolling over a failure store is currently not supported", validationException); - } - return validationException; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 9f174c3adca1b..e0a28e635a0a3 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -16,8 +16,13 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.RoutingMissingException; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import 
org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -32,6 +37,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -48,6 +54,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; @@ -55,6 +62,7 @@ import java.util.function.Consumer; import java.util.function.LongSupplier; +import static org.elasticsearch.action.bulk.TransportBulkAction.LAZY_ROLLOVER_ORIGIN; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; /** @@ -80,6 +88,9 @@ final class BulkOperation extends ActionRunnable { private final FailureStoreDocumentConverter failureStoreDocumentConverter; private final IndexNameExpressionResolver indexNameExpressionResolver; private final NodeClient client; + private final OriginSettingClient rolloverClient; + private final Set failureStoresToBeRolledOver = ConcurrentCollections.newConcurrentSet(); + private final Set failedRolloverRequests = ConcurrentCollections.newConcurrentSet(); BulkOperation( Task task, @@ -144,6 +155,7 @@ final class BulkOperation extends ActionRunnable { this.client = client; this.observer = observer; 
this.failureStoreDocumentConverter = failureStoreDocumentConverter; + this.rolloverClient = new OriginSettingClient(client, LAZY_ROLLOVER_ORIGIN); } @Override @@ -168,8 +180,63 @@ private void doRedirectFailures() { )) { return; } - Map> requestsByShard = drainAndGroupRedirectsByShards(clusterState); - executeBulkRequestsByShard(requestsByShard, clusterState, this::completeBulkOperation); + Runnable executeRedirectRequests = () -> { + // Get new cluster state that includes any potential failure store rollovers. + var rolledOverState = observer.setAndGetObservedState(); + Map> requestsByShard = drainAndGroupRedirectsByShards(rolledOverState); + executeBulkRequestsByShard(requestsByShard, rolledOverState, this::completeBulkOperation); + }; + rollOverFailureStores(executeRedirectRequests); + } + + /** + * Send rollover requests for all failure stores that need it. After all requests have completed, we execute the given runnable. + * Any failures while rolling over will be added to the {@link BulkItemResponse} entries of the index requests that were redirected to + * the failure store that failed to roll over. + */ + private void rollOverFailureStores(Runnable runnable) { + // Skip allocation of some objects if we don't need to roll over anything. + if (failureStoresToBeRolledOver.isEmpty() || DataStream.isFailureStoreFeatureFlagEnabled() == false) { + runnable.run(); + return; + } + try (RefCountingRunnable refs = new RefCountingRunnable(runnable)) { + for (String dataStream : failureStoresToBeRolledOver) { + RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); + // We are executing a lazy rollover because it is an action specialised for this situation, when we want an + // unconditional and performant rollover. 
+ rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, ActionListener.releaseAfter(new ActionListener<>() { + + @Override + public void onResponse(RolloverResponse result) { + // A successful response has rolled_over false when in the following cases: + // - A request had the parameter lazy or dry_run enabled + // - A request had conditions that were not met + // Since none of the above apply, getting a response with rolled_over false is considered a bug + // that should be caught here and inform the developer. + assert result.isRolledOver() : "An successful lazy rollover should always result in a rolled over data stream"; + } + + @Override + public void onFailure(Exception e) { + for (BulkItemRequest failureStoreRedirect : failureStoreRedirects) { + // Both these values are the name of the _data stream_ that the failure store belongs to. + if (failureStoreRedirect.index().equals(dataStream) == false) { + continue; + } + addFailure(failureStoreRedirect.request(), failureStoreRedirect.id(), failureStoreRedirect.index(), e); + failedRolloverRequests.add(failureStoreRedirect.id()); + } + } + + }, refs.acquire())); + } + } } private long buildTookInMillis(long startTimeNanos) { @@ -219,6 +286,9 @@ private Map> groupRequestsByShards( if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } + if (failedRolloverRequests.contains(bulkItemRequest.id())) { + continue; + } IndexAbstraction ia = null; try { ia = concreteIndices.resolveIfAbsent(docWriteRequest); @@ -372,9 +442,11 @@ public void onResponse(BulkShardResponse bulkShardResponse) { BulkItemRequest bulkItemRequest = bulkShardRequest.items()[idx]; assert bulkItemRequest.id() == bulkItemResponse.getItemId() : "Bulk items were returned out of order"; - String failureStoreReference = getRedirectTarget(bulkItemRequest.request(), getClusterState().metadata()); + DataStream failureStoreReference = getRedirectTarget(bulkItemRequest.request(), 
getClusterState().metadata()); if (failureStoreReference != null) { - addDocumentToRedirectRequests(bulkItemRequest, bulkItemResponse.getFailure().getCause(), failureStoreReference); + maybeMarkFailureStoreForRollover(failureStoreReference); + var cause = bulkItemResponse.getFailure().getCause(); + addDocumentToRedirectRequests(bulkItemRequest, cause, failureStoreReference.getName()); } addFailure(bulkItemResponse); } else { @@ -392,9 +464,10 @@ public void onFailure(Exception e) { final String indexName = request.index(); DocWriteRequest docWriteRequest = request.request(); - String failureStoreReference = getRedirectTarget(docWriteRequest, getClusterState().metadata()); + DataStream failureStoreReference = getRedirectTarget(docWriteRequest, getClusterState().metadata()); if (failureStoreReference != null) { - addDocumentToRedirectRequests(request, e, failureStoreReference); + maybeMarkFailureStoreForRollover(failureStoreReference); + addDocumentToRedirectRequests(request, e, failureStoreReference.getName()); } addFailure(docWriteRequest, request.id(), indexName, e); } @@ -416,10 +489,9 @@ private void completeShardOperation() { * * @param docWriteRequest the write request to check * @param metadata cluster state metadata for resolving index abstractions - * @return a data stream name if the write request points to a data stream that has the failure store enabled, - * or {@code null} if it does + * @return a data stream if the write request points to a data stream that has the failure store enabled, or {@code null} if it does not */ - private static String getRedirectTarget(DocWriteRequest docWriteRequest, Metadata metadata) { + private static DataStream getRedirectTarget(DocWriteRequest docWriteRequest, Metadata metadata) { // Feature flag guard if (DataStream.isFailureStoreFeatureFlagEnabled() == false) { return null; @@ -442,7 +514,7 @@ private static String getRedirectTarget(DocWriteRequest docWriteRequest, Meta DataStream parentDataStream = 
writeIndexAbstraction.getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { // Keep the data stream name around to resolve the redirect to failure store if the shard level request fails. - return parentDataStream.getName(); + return parentDataStream; } } return null; @@ -489,6 +561,17 @@ private void addDocumentToRedirectRequests(BulkItemRequest request, Exception ca failureStoreRedirects.add(redirected); } + /** + * Check whether the failure store of the given data stream is marked for lazy rollover. + * If so, we'll need to roll it over before we index the failed documents into the failure store. + */ + private void maybeMarkFailureStoreForRollover(DataStream dataStream) { + if (dataStream.getFailureIndices().isRolloverOnWrite() == false) { + return; + } + failureStoresToBeRolledOver.add(dataStream.getName()); + } + /** * Examine the cluster state for blocks before continuing. If any block exists in the cluster state, this function will return * {@code true}. 
If the block is retryable, the {@code retryOperation} runnable will be called asynchronously if the cluster ever diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 13c4009cbc3e2..a9431ca1eeff0 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.ingest.IngestActionForwarder; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.WriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; @@ -65,6 +66,7 @@ import org.elasticsearch.transport.TransportService; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -72,8 +74,8 @@ import java.util.SortedMap; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.stream.Collectors; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -348,46 +350,11 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor ex return; } - // Attempt to create all the indices that we're going to need during the bulk before we start. 
- // Step 1: collect all the indices in the request - final Map indices = bulkRequest.requests.stream() - // delete requests should not attempt to create the index (if the index does not - // exist), unless an external versioning is used - .filter( - request -> request.opType() != DocWriteRequest.OpType.DELETE - || request.versionType() == VersionType.EXTERNAL - || request.versionType() == VersionType.EXTERNAL_GTE - ) - .collect( - Collectors.toMap( - DocWriteRequest::index, - request -> ReducedRequestInfo.of(request.isRequireAlias(), request.isRequireDataStream()), - (existing, updated) -> ReducedRequestInfo.of( - existing.isRequireAlias || updated.isRequireAlias, - existing.isRequireDataStream || updated.isRequireDataStream - ) - ) - ); + Map indicesToAutoCreate = new HashMap<>(); + Set dataStreamsToBeRolledOver = new HashSet<>(); + Set failureStoresToBeRolledOver = new HashSet<>(); + populateMissingTargets(bulkRequest, indicesToAutoCreate, dataStreamsToBeRolledOver, failureStoresToBeRolledOver); - // Step 2: filter the list of indices to find those that don't currently exist. - final Map indicesThatCannotBeCreated = new HashMap<>(); - final ClusterState state = clusterService.state(); - Map indicesToAutoCreate = indices.entrySet() - .stream() - .filter(entry -> indexNameExpressionResolver.hasIndexAbstraction(entry.getKey(), state) == false) - // We should only auto create if we are not requiring it to be an alias - .filter(entry -> entry.getValue().isRequireAlias == false) - .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().isRequireDataStream)); - - // Step 3: Collect all the data streams that need to be rolled over before writing - Set dataStreamsToBeRolledOver = featureService.clusterHasFeature(state, LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER) - ? 
indices.keySet().stream().filter(target -> { - DataStream dataStream = state.metadata().dataStreams().get(target); - return dataStream != null && dataStream.rolloverOnWrite(); - }).collect(Collectors.toSet()) - : Set.of(); - - // Step 4: create all the indices that are missing, if there are any missing. start the bulk after all the creates come back. createMissingIndicesAndIndexData( task, bulkRequest, @@ -395,14 +362,79 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor ex listener, indicesToAutoCreate, dataStreamsToBeRolledOver, - indicesThatCannotBeCreated, + failureStoresToBeRolledOver, startTime ); } - /* - * This method is responsible for creating any missing indices, rolling over a data stream when needed and then - * indexing the data in the BulkRequest + /** + * Determine all the targets (i.e. indices, data streams, failure stores) that require an action before we can proceed with the bulk + * request. Indices might need to be created, and data streams and failure stores might need to be rolled over when they're marked + * for lazy rollover. + * + * @param bulkRequest the bulk request + * @param indicesToAutoCreate a map of index names to whether they require a data stream + * @param dataStreamsToBeRolledOver a set of data stream names that were marked for lazy rollover and thus need to be rolled over now + * @param failureStoresToBeRolledOver a set of data stream names whose failure store was marked for lazy rollover and thus need to be + * rolled over now + */ + private void populateMissingTargets( + BulkRequest bulkRequest, + Map indicesToAutoCreate, + Set dataStreamsToBeRolledOver, + Set failureStoresToBeRolledOver + ) { + ClusterState state = clusterService.state(); + // A map for memorizing which indices we already exist (or don't). 
+ Map indexExistence = new HashMap<>(); + Function indexExistenceComputation = (index) -> indexNameExpressionResolver.hasIndexAbstraction(index, state); + boolean lazyRolloverFeature = featureService.clusterHasFeature(state, LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER); + boolean lazyRolloverFailureStoreFeature = DataStream.isFailureStoreFeatureFlagEnabled(); + Set indicesThatRequireAlias = new HashSet<>(); + + for (DocWriteRequest request : bulkRequest.requests) { + // Delete requests should not attempt to create the index (if the index does not exist), unless an external versioning is used. + if (request.opType() == OpType.DELETE + && request.versionType() != VersionType.EXTERNAL + && request.versionType() != VersionType.EXTERNAL_GTE) { + continue; + } + boolean indexExists = indexExistence.computeIfAbsent(request.index(), indexExistenceComputation); + if (indexExists == false) { + // We should only auto create an index if _none_ of the requests are requiring it to be an alias. + if (request.isRequireAlias()) { + // Remember that this a request required this index to be an alias. + if (indicesThatRequireAlias.add(request.index())) { + // If we didn't already know that, we remove the index from the list of indices to create (if present). + indicesToAutoCreate.remove(request.index()); + } + } else if (indicesThatRequireAlias.contains(request.index()) == false) { + Boolean requiresDataStream = indicesToAutoCreate.get(request.index()); + if (requiresDataStream == null || (requiresDataStream == false && request.isRequireDataStream())) { + indicesToAutoCreate.put(request.index(), request.isRequireDataStream()); + } + } + } + // Determine which data streams and failure stores need to be rolled over. 
+ if (lazyRolloverFeature) { + DataStream dataStream = state.metadata().dataStreams().get(request.index()); + if (dataStream != null) { + var writeToFailureStore = request instanceof IndexRequest indexRequest && indexRequest.isWriteToFailureStore(); + if (writeToFailureStore == false && dataStream.getBackingIndices().isRolloverOnWrite()) { + dataStreamsToBeRolledOver.add(request.index()); + } else if (lazyRolloverFailureStoreFeature + && writeToFailureStore + && dataStream.getFailureIndices().isRolloverOnWrite()) { + failureStoresToBeRolledOver.add(request.index()); + } + } + } + } + } + + /** + * This method is responsible for creating any missing indices, rolling over data streams and their failure stores when needed, and then + * indexing the data in the BulkRequest. */ protected void createMissingIndicesAndIndexData( Task task, @@ -411,15 +443,16 @@ protected void createMissingIndicesAndIndexData( ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToBeRolledOver, - Map indicesThatCannotBeCreated, + Set failureStoresToBeRolledOver, long startTime ) { final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); // Optimizing when there are no prerequisite actions - if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty()) { - executeBulk(task, bulkRequest, startTime, listener, executor, responses, indicesThatCannotBeCreated); + if (indicesToAutoCreate.isEmpty() && dataStreamsToBeRolledOver.isEmpty() && failureStoresToBeRolledOver.isEmpty()) { + executeBulk(task, bulkRequest, startTime, listener, executor, responses, Map.of()); return; } + final Map indicesThatCannotBeCreated = new HashMap<>(); Runnable executeBulkRunnable = () -> executor.execute(new ActionRunnable<>(listener) { @Override protected void doRun() { @@ -427,45 +460,77 @@ protected void doRun() { } }); try (RefCountingRunnable refs = new RefCountingRunnable(executeBulkRunnable)) { - for (Map.Entry indexEntry : indicesToAutoCreate.entrySet()) { - final 
String index = indexEntry.getKey(); - createIndex(index, indexEntry.getValue(), bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { - @Override - public void onResponse(CreateIndexResponse createIndexResponse) {} - - @Override - public void onFailure(Exception e) { - final Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause instanceof IndexNotFoundException indexNotFoundException) { - synchronized (indicesThatCannotBeCreated) { - indicesThatCannotBeCreated.put(index, indexNotFoundException); - } - } else if ((cause instanceof ResourceAlreadyExistsException) == false) { - // fail all requests involving this index, if create didn't work - failRequestsWhenPrerequisiteActionFailed(index, bulkRequest, responses, e); + createIndices(bulkRequest, indicesToAutoCreate, indicesThatCannotBeCreated, responses, refs); + rollOverDataStreams(bulkRequest, dataStreamsToBeRolledOver, false, responses, refs); + rollOverDataStreams(bulkRequest, failureStoresToBeRolledOver, true, responses, refs); + } + } + + private void createIndices( + BulkRequest bulkRequest, + Map indicesToAutoCreate, + Map indicesThatCannotBeCreated, + AtomicArray responses, + RefCountingRunnable refs + ) { + for (Map.Entry indexEntry : indicesToAutoCreate.entrySet()) { + final String index = indexEntry.getKey(); + createIndex(index, indexEntry.getValue(), bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { + @Override + public void onResponse(CreateIndexResponse createIndexResponse) {} + + @Override + public void onFailure(Exception e) { + final Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof IndexNotFoundException indexNotFoundException) { + synchronized (indicesThatCannotBeCreated) { + indicesThatCannotBeCreated.put(index, indexNotFoundException); } + } else if ((cause instanceof ResourceAlreadyExistsException) == false) { + // fail all requests involving this index, if create didn't work + 
failRequestsWhenPrerequisiteActionFailed(index, bulkRequest, responses, e); } - }, refs.acquire())); - } - for (String dataStream : dataStreamsToBeRolledOver) { - lazyRolloverDataStream(dataStream, bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { - - @Override - public void onResponse(RolloverResponse result) { - // A successful response has rolled_over false when in the following cases: - // - A request had the parameter lazy or dry_run enabled - // - A request had conditions that were not met - // Since none of the above apply, getting a response with rolled_over false is considered a bug - // that should be caught here and inform the developer. - assert result.isRolledOver() : "An successful lazy rollover should always result in a rolled over data stream"; - } + } + }, refs.acquire())); + } + } - @Override - public void onFailure(Exception e) { - failRequestsWhenPrerequisiteActionFailed(dataStream, bulkRequest, responses, e); - } - }, refs.acquire())); + private void rollOverDataStreams( + BulkRequest bulkRequest, + Set dataStreamsToBeRolledOver, + boolean targetFailureStore, + AtomicArray responses, + RefCountingRunnable refs + ) { + for (String dataStream : dataStreamsToBeRolledOver) { + RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); + rolloverRequest.masterNodeTimeout(bulkRequest.timeout); + if (targetFailureStore) { + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); } + // We are executing a lazy rollover because it is an action specialised for this situation, when we want an + // unconditional and performant rollover. 
+ rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, ActionListener.releaseAfter(new ActionListener<>() { + + @Override + public void onResponse(RolloverResponse result) { + // A successful response has rolled_over false when in the following cases: + // - A request had the parameter lazy or dry_run enabled + // - A request had conditions that were not met + // Since none of the above apply, getting a response with rolled_over false is considered a bug + // that should be caught here and inform the developer. + assert result.isRolledOver() : "An successful lazy rollover should always result in a rolled over data stream"; + } + + @Override + public void onFailure(Exception e) { + failRequestsWhenPrerequisiteActionFailed(dataStream, bulkRequest, responses, e); + } + }, refs.acquire())); } } @@ -585,14 +650,6 @@ void createIndex(String index, boolean requireDataStream, TimeValue timeout, Act client.execute(AutoCreateAction.INSTANCE, createIndexRequest, listener); } - void lazyRolloverDataStream(String dataStream, TimeValue timeout, ActionListener listener) { - RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); - rolloverRequest.masterNodeTimeout(timeout); - // We are executing a lazy rollover because it is an action specialised for this situation, when we want an - // unconditional and performant rollover. 
- rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, listener); - } - private static boolean setResponseFailureIfIndexMatches( AtomicArray responses, int idx, @@ -612,31 +669,6 @@ protected long buildTookInMillis(long startTimeNanos) { return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos); } - private enum ReducedRequestInfo { - - REQUIRE_ALIAS_AND_DATA_STREAM(true, true), - REQUIRE_ALIAS_NOT_DATA_STREAM(true, false), - - REQUIRE_DATA_STREAM_NOT_ALIAS(false, true), - REQUIRE_NOTHING(false, false); - - private final boolean isRequireAlias; - private final boolean isRequireDataStream; - - ReducedRequestInfo(boolean isRequireAlias, boolean isRequireDataStream) { - this.isRequireAlias = isRequireAlias; - this.isRequireDataStream = isRequireDataStream; - } - - static ReducedRequestInfo of(boolean isRequireAlias, boolean isRequireDataStream) { - if (isRequireAlias) { - return isRequireDataStream ? REQUIRE_ALIAS_AND_DATA_STREAM : REQUIRE_ALIAS_NOT_DATA_STREAM; - } - return isRequireDataStream ? 
REQUIRE_DATA_STREAM_NOT_ALIAS : REQUIRE_NOTHING; - } - - } - void executeBulk( Task task, BulkRequest bulkRequest, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index d543e3b56a9ef..83d331d2e4aa1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.ingest.IngestService; @@ -75,7 +74,7 @@ protected void createMissingIndicesAndIndexData( ActionListener listener, Map indicesToAutoCreate, Set dataStreamsToRollover, - Map indicesThatCannotBeCreated, + Set failureStoresToBeRolledOver, long startTime ) { final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size()); diff --git a/server/src/main/java/org/elasticsearch/action/support/MappedActionFilter.java b/server/src/main/java/org/elasticsearch/action/support/MappedActionFilter.java index 82e1804716cbc..deff2d371e633 100644 --- a/server/src/main/java/org/elasticsearch/action/support/MappedActionFilter.java +++ b/server/src/main/java/org/elasticsearch/action/support/MappedActionFilter.java @@ -8,6 +8,31 @@ package org.elasticsearch.action.support; -public interface MappedActionFilter extends ActionFilter { +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.tasks.Task; + +/** + * An action filter that is run only for a single action. 
+ * + * Note: This is an independent interface from {@link ActionFilter} so that it does not + * have an order. The relative order of executed MappedActionFilter with the same action name + * is undefined. + */ +public interface MappedActionFilter { + /** Return the name of the action for which this filter should be run */ String actionName(); + + /** + * Enables filtering the execution of an action on the request side, either by sending a response through the + * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain} + */ + void apply( + Task task, + String action, + Request request, + ActionListener listener, + ActionFilterChain chain + ); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 16984bcbde0f1..082e1dd9257e0 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -55,6 +55,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.bootstrap.BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING; +import static org.elasticsearch.nativeaccess.WindowsFunctions.ConsoleCtrlHandler.CTRL_CLOSE_EVENT; /** * This class starts elasticsearch. @@ -302,17 +303,17 @@ static void initializeNatives(final Path tmpFile, final boolean mlockAll, final // listener for windows close event if (ctrlHandler) { - Natives.addConsoleCtrlHandler(new ConsoleCtrlHandler() { - @Override - public boolean handle(int code) { + var windowsFunctions = nativeAccess.getWindowsFunctions(); + if (windowsFunctions != null) { + windowsFunctions.addConsoleCtrlHandler(code -> { if (CTRL_CLOSE_EVENT == code) { logger.info("running graceful exit on windows"); shutdown(); return true; } return false; - } - }); + }); + } } // force remainder of JNA to be loaded (if available). 
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java b/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index c5bdef24d6b81..01d9a122138f1 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -14,15 +14,12 @@ import com.sun.jna.Pointer; import com.sun.jna.Structure; import com.sun.jna.WString; -import com.sun.jna.win32.StdCallLibrary; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; /** @@ -32,10 +29,6 @@ final class JNAKernel32Library { private static final Logger logger = LogManager.getLogger(JNAKernel32Library.class); - // Callbacks must be kept around in order to be able to be called later, - // when the Windows ConsoleCtrlHandler sends an event. - private List callbacks = new ArrayList<>(); - // Native library instance must be kept around for the same reason. private static final class Holder { private static final JNAKernel32Library instance = new JNAKernel32Library(); @@ -58,61 +51,6 @@ static JNAKernel32Library getInstance() { return Holder.instance; } - /** - * Adds a Console Ctrl Handler. 
- * - * @return true if the handler is correctly set - * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is not loaded or if the native function is not found - * @throws java.lang.NoClassDefFoundError if the library for native calls is missing - */ - boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { - boolean result = false; - if (handler != null) { - NativeHandlerCallback callback = new NativeHandlerCallback(handler); - result = SetConsoleCtrlHandler(callback, true); - if (result) { - callbacks.add(callback); - } - } - return result; - } - - List getCallbacks() { - return Collections.unmodifiableList(callbacks); - } - - /** - * Native call to the Kernel32 API to set a new Console Ctrl Handler. - * - * @return true if the handler is correctly set - * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is not loaded or if the native function is not found - * @throws java.lang.NoClassDefFoundError if the library for native calls is missing - */ - native boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); - - /** - * Handles consoles event with WIN API - *

- * See http://msdn.microsoft.com/en-us/library/windows/desktop/ms683242%28v=vs.85%29.aspx - */ - class NativeHandlerCallback implements StdCallLibrary.StdCallCallback { - - private final ConsoleCtrlHandler handler; - - NativeHandlerCallback(ConsoleCtrlHandler handler) { - this.handler = handler; - } - - public boolean callback(long dwCtrlType) { - int event = (int) dwCtrlType; - if (logger.isDebugEnabled()) { - logger.debug("console control handler receives event [{}@{}]", event, dwCtrlType); - - } - return handler.handle(event); - } - } - /** * Memory protection constraints * diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 31f1d83af3e15..ba4e90ee2c6c1 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -8,12 +8,8 @@ package org.elasticsearch.bootstrap; -import com.sun.jna.Native; -import com.sun.jna.WString; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.Constants; import java.nio.file.Path; @@ -34,51 +30,6 @@ private JNANatives() {} // otherwise they are only inherited for new threads (ES app threads) static boolean LOCAL_SYSTEM_CALL_FILTER_ALL = false; - /** - * Retrieves the short path form of the specified path. 
- * - * @param path the path - * @return the short path name (or the original path if getting the short path name fails for any reason) - */ - static String getShortPathName(String path) { - assert Constants.WINDOWS; - try { - final WString longPath = new WString("\\\\?\\" + path); - // first we get the length of the buffer needed - final int length = JNAKernel32Library.getInstance().GetShortPathNameW(longPath, null, 0); - if (length == 0) { - logger.warn("failed to get short path name: {}", Native.getLastError()); - return path; - } - final char[] shortPath = new char[length]; - // knowing the length of the buffer, now we get the short name - if (JNAKernel32Library.getInstance().GetShortPathNameW(longPath, shortPath, length) > 0) { - return Native.toString(shortPath); - } else { - logger.warn("failed to get short path name: {}", Native.getLastError()); - return path; - } - } catch (final UnsatisfiedLinkError e) { - return path; - } - } - - static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { - // The console Ctrl handler is necessary on Windows platforms only. 
- if (Constants.WINDOWS) { - try { - boolean result = JNAKernel32Library.getInstance().addConsoleCtrlHandler(handler); - if (result) { - logger.debug("console ctrl handler correctly set"); - } else { - logger.warn("unknown error {} when adding console ctrl handler", Native.getLastError()); - } - } catch (UnsatisfiedLinkError e) { - // this will have already been logged by Kernel32Library, no need to repeat it - } - } - } - static void tryInstallSystemCallFilter(Path tmpFile) { try { int ret = SystemCallFilter.init(tmpFile); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java index 0eb2a5b2bffa5..c792d1e0bfad0 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -51,28 +51,6 @@ private Natives() {} JNA_AVAILABLE = v; } - /** - * Retrieves the short path form of the specified path. - * - * @param path the path - * @return the short path name (or the original path if getting the short path name fails for any reason) - */ - static String getShortPathName(final String path) { - if (JNA_AVAILABLE == false) { - logger.warn("cannot obtain short path for [{}] because JNA is not available", path); - return path; - } - return JNANatives.getShortPathName(path); - } - - static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { - if (JNA_AVAILABLE == false) { - logger.warn("cannot register console handler because JNA is not available"); - return; - } - JNANatives.addConsoleCtrlHandler(handler); - } - static void tryInstallSystemCallFilter(Path tmpFile) { if (JNA_AVAILABLE == false) { logger.warn("cannot install system call filter because JNA is not available"); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index 4b09d5d143046..2d37da1d10245 100644 --- 
a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.core.IOUtils; import org.elasticsearch.env.Environment; +import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.PluginDescriptor; import org.elasticsearch.plugins.PluginsUtils; @@ -133,7 +134,7 @@ private static Process spawnNativeController(final Path spawnPath, final Path tm * http://hg.openjdk.java.net/jdk8/jdk8/jdk/file/687fd7c7986d/src/windows/native/java/lang/ProcessImpl_md.c#l319), this * limitation is in force. As such, we use the short name to avoid any such problems. */ - command = Natives.getShortPathName(spawnPath.toString()); + command = NativeAccess.instance().getWindowsFunctions().getShortPathName(spawnPath.toString()); } else { command = spawnPath.toString(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java index 80b4b455912e7..6a15e0327d669 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java @@ -141,18 +141,18 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources /** * Transforms a CoordinationDiagnosticsService.CoordinationDiagnosticsResult into a HealthIndicatorResult. 
* @param coordinationDiagnosticsResult The CoordinationDiagnosticsResult from the CoordinationDiagnosticsService to be transformed - * @param explain If false, the details and user actions returned will be empty + * @param verbose If false, the details and user actions returned will be empty * @return The HealthIndicatorResult */ // Non-private for testing HealthIndicatorResult getHealthIndicatorResult( CoordinationDiagnosticsService.CoordinationDiagnosticsResult coordinationDiagnosticsResult, - boolean explain + boolean verbose ) { HealthStatus status = HealthStatus.fromCoordinationDiagnosticsStatus(coordinationDiagnosticsResult.status()); - HealthIndicatorDetails details = getDetails(coordinationDiagnosticsResult.details(), explain); + HealthIndicatorDetails details = getDetails(coordinationDiagnosticsResult.details(), verbose); Collection impacts = status.indicatesHealthProblem() ? UNSTABLE_MASTER_IMPACTS : List.of(); - List diagnosis = status.indicatesHealthProblem() ? getUnstableMasterDiagnoses(explain) : List.of(); + List diagnosis = status.indicatesHealthProblem() ? getUnstableMasterDiagnoses(verbose) : List.of(); return createIndicator(status, coordinationDiagnosticsResult.summary(), details, impacts, diagnosis); } @@ -242,12 +242,12 @@ private String getNameForNodeId(String nodeId) { * This method returns the relevant user actions when the master is unstable, linking to some troubleshooting docs and suggesting to * contact support. * - * @param explain If true, the returned list includes UserActions linking to troubleshooting docs and another to contact support, + * @param verbose If true, the returned list includes UserActions linking to troubleshooting docs and another to contact support, * otherwise an empty list. * @return the relevant user actions when the master is unstable. 
*/ - private List getUnstableMasterDiagnoses(boolean explain) { - if (explain) { + private List getUnstableMasterDiagnoses(boolean verbose) { + if (verbose) { return List.of(TROUBLESHOOT_DISCOVERY, TROUBLESHOOT_UNSTABLE_CLUSTER, CONTACT_SUPPORT); } else { return List.of(); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java index 7c176f65599a9..309848635a440 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorService.java @@ -984,34 +984,33 @@ private static Stream createMessage(int count, String singular, String p } public HealthIndicatorDetails getDetails(boolean verbose) { - if (verbose) { - return new SimpleHealthIndicatorDetails( - Map.of( - "unassigned_primaries", - primaries.unassigned, - "initializing_primaries", - primaries.initializing, - "creating_primaries", - primaries.unassigned_new, - "restarting_primaries", - primaries.unassigned_restarting, - "started_primaries", - primaries.started + primaries.relocating, - "unassigned_replicas", - replicas.unassigned, - "initializing_replicas", - replicas.initializing, - "creating_replicas", - replicas.unassigned_new, - "restarting_replicas", - replicas.unassigned_restarting, - "started_replicas", - replicas.started + replicas.relocating - ) - ); - } else { + if (verbose == false) { return HealthIndicatorDetails.EMPTY; } + return new SimpleHealthIndicatorDetails( + Map.of( + "unassigned_primaries", + primaries.unassigned, + "initializing_primaries", + primaries.initializing, + "creating_primaries", + primaries.unassigned_new, + "restarting_primaries", + primaries.unassigned_restarting, + "started_primaries", + primaries.started 
+ primaries.relocating, + "unassigned_replicas", + replicas.unassigned, + "initializing_replicas", + replicas.initializing, + "creating_replicas", + replicas.unassigned_new, + "restarting_replicas", + replicas.unassigned_restarting, + "started_replicas", + replicas.started + replicas.relocating + ) + ); } public List getImpacts() { diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index b69fc928d69af..a999742f7b667 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -266,16 +266,19 @@ public static String threadName(final String nodeName, final String namePrefix) return "elasticsearch" + (nodeName.isEmpty() ? "" : "[") + nodeName + (nodeName.isEmpty() ? "" : "]") + "[" + namePrefix + "]"; } - public static String executorName(Thread thread) { - String name = thread.getName(); + public static String executorName(String threadName) { // subtract 2 to avoid the `]` of the thread number part. 
- int executorNameEnd = name.lastIndexOf(']', name.length() - 2); - int executorNameStart = name.lastIndexOf('[', executorNameEnd); + int executorNameEnd = threadName.lastIndexOf(']', threadName.length() - 2); + int executorNameStart = threadName.lastIndexOf('[', executorNameEnd); if (executorNameStart == -1 || executorNameEnd - executorNameStart <= 1) { return null; } + return threadName.substring(executorNameStart + 1, executorNameEnd); + } - return name.substring(executorNameStart + 1, executorNameEnd); + public static String executorName(Thread thread) { + String name = thread.getName(); + return executorName(name); } public static ThreadFactory daemonThreadFactory(Settings settings, String namePrefix) { diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index b08e0d221cf77..2227c54871352 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -748,4 +748,24 @@ public static XContentParser mapToXContentParser(XContentParserConfiguration con throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } } + + /** + * Drains all data available via this parser into a provided builder. + * Provided parser is closed as a result. 
+ * @param parser + * @param destination + */ + public static void drainAndClose(XContentParser parser, XContentBuilder destination) throws IOException { + if (parser.isClosed()) { + throw new IllegalStateException("Can't drain a parser that is closed"); + } + + XContentParser.Token token; + do { + destination.copyCurrentStructure(parser); + token = parser.nextToken(); + } while (token != null); + + parser.close(); + } } diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java index 3304b71b4ca31..2dfb4300fc691 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthIndicatorService.java @@ -120,7 +120,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources diskHealthAnalyzer.getSymptom(), diskHealthAnalyzer.getDetails(verbose), diskHealthAnalyzer.getImpacts(), - diskHealthAnalyzer.getDiagnoses(maxAffectedResourcesCount) + diskHealthAnalyzer.getDiagnoses(verbose, maxAffectedResourcesCount) ); } @@ -357,8 +357,8 @@ List getImpacts() { return impacts; } - private List getDiagnoses(int size) { - if (healthStatus == HealthStatus.GREEN) { + private List getDiagnoses(boolean verbose, int size) { + if (verbose == false || healthStatus == HealthStatus.GREEN) { return List.of(); } List diagnosisList = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index 16e18b69d5c1d..e5ced00905744 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -123,12 +123,13 @@ public HealthIndicatorResult 
calculate(boolean verbose, int maxAffectedResources var shardLimitsMetadata = healthMetadata.getShardLimitsMetadata(); return mergeIndicators( + verbose, calculateFrom(shardLimitsMetadata.maxShardsPerNode(), state, ShardLimitValidator::checkShardLimitForNormalNodes), calculateFrom(shardLimitsMetadata.maxShardsPerNodeFrozen(), state, ShardLimitValidator::checkShardLimitForFrozenNodes) ); } - private HealthIndicatorResult mergeIndicators(StatusResult dataNodes, StatusResult frozenNodes) { + private HealthIndicatorResult mergeIndicators(boolean verbose, StatusResult dataNodes, StatusResult frozenNodes) { var finalStatus = HealthStatus.merge(Stream.of(dataNodes.status, frozenNodes.status)); var diagnoses = List.of(); var symptomBuilder = new StringBuilder(); @@ -166,9 +167,9 @@ private HealthIndicatorResult mergeIndicators(StatusResult dataNodes, StatusResu return createIndicator( finalStatus, symptomBuilder.toString(), - buildDetails(dataNodes.result, frozenNodes.result), + verbose ? buildDetails(dataNodes.result, frozenNodes.result) : HealthIndicatorDetails.EMPTY, indicatorImpacts, - diagnoses + verbose ? 
diagnoses : List.of() ); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsWriter.java index 154e0ae399c17..2cb84dce6b09d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsWriter.java @@ -48,12 +48,12 @@ import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; +import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedRandomVectorScorerSupplier; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.vec.VectorScorerFactory; -import org.elasticsearch.vec.VectorScorerSupplierAdapter; import org.elasticsearch.vec.VectorSimilarityType; import java.io.Closeable; @@ -425,19 +425,23 @@ private ScalarQuantizedCloseableRandomVectorScorerSupplier mergeOneFieldToIndex( success = true; final IndexInput finalQuantizationDataInput = quantizationDataInput; + final RandomAccessQuantizedByteVectorValues vectorValues = new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues( + fieldInfo.getVectorDimension(), + docsWithField.cardinality(), + quantizationDataInput + ); + // retrieve a scorer RandomVectorScorerSupplier scorerSupplier = null; Optional factory = VectorScorerFactory.instance(); if (factory.isPresent()) { var scorer = factory.get() .getInt7ScalarQuantizedVectorScorer( - byteVectorValues.dimension(), - docsWithField.cardinality(), - mergedQuantizationState.getConstantMultiplier(), 
VectorSimilarityType.of(fieldInfo.getVectorSimilarityFunction()), - quantizationDataInput - ) - .map(VectorScorerSupplierAdapter::new); + quantizationDataInput, + vectorValues, + mergedQuantizationState.getConstantMultiplier() + ); if (scorer.isPresent()) { scorerSupplier = scorer.get(); } @@ -446,11 +450,7 @@ private ScalarQuantizedCloseableRandomVectorScorerSupplier mergeOneFieldToIndex( scorerSupplier = new ScalarQuantizedRandomVectorScorerSupplier( fieldInfo.getVectorSimilarityFunction(), mergedQuantizationState, - new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues( - fieldInfo.getVectorDimension(), - docsWithField.cardinality(), - quantizationDataInput - ) + vectorValues ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index bdb6806440339..dbc098b6ce2ae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -625,7 +625,10 @@ private static void parseNonDynamicArray( ) throws IOException { // Check if we need to record the array source. This only applies to synthetic source. if (context.mappingLookup().isSourceSynthetic() && context.getClonedSource() == false) { - if ((mapper instanceof ObjectMapper objectMapper && objectMapper.storeArraySource()) || mapper instanceof NestedObjectMapper) { + boolean storeArraySourceEnabled = mapper instanceof ObjectMapper objectMapper && objectMapper.storeArraySource(); + boolean fieldWithFallbackSyntheticSource = mapper instanceof FieldMapper fieldMapper + && fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK; + if (storeArraySourceEnabled || fieldWithFallbackSyntheticSource || mapper instanceof NestedObjectMapper) { // Clone the DocumentParserContext to parse its subtree twice. 
Tuple tuple = XContentDataHelper.cloneSubContext(context); context.addIgnoredField( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java b/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java index a527a2df20494..6b5b2537e5e1f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/XContentDataHelper.java @@ -30,7 +30,7 @@ /** * Helper class for processing field data of any type, as provided by the {@link XContentParser}. */ -final class XContentDataHelper { +public final class XContentDataHelper { /** * Build a {@link StoredField} for the value on which the parser is * currently positioned. @@ -57,7 +57,7 @@ static BytesRef encodeToken(XContentParser parser) throws IOException { * Build a {@link BytesRef} wrapping a byte array containing an encoded form * of the passed XContentBuilder contents. */ - static BytesRef encodeXContentBuilder(XContentBuilder builder) throws IOException { + public static BytesRef encodeXContentBuilder(XContentBuilder builder) throws IOException { return new BytesRef(TypeUtils.encode(builder)); } diff --git a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java index 61425250c19b4..4aab146230f7c 100644 --- a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java +++ b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java @@ -38,6 +38,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; public class ReadinessService extends AbstractLifecycleComponent implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(ReadinessService.class); @@ -244,13 +245,35 @@ public void clusterChanged(ClusterChangedEvent 
event) { logger.info("marking node as not ready because it's shutting down"); } } else { - boolean masterElected = clusterState.nodes().getMasterNodeId() != null; - boolean fileSettingsApplied = areFileSettingsApplied(clusterState); - logger.info("readiness: masterElected={}, fileSettingsApplied={}", masterElected, fileSettingsApplied); + boolean masterElected = getReadinessState(clusterState, event.previousState(), this::isMasterElected, "masterElected"); + boolean fileSettingsApplied = getReadinessState( + clusterState, + event.previousState(), + this::areFileSettingsApplied, + "fileSettingsApplied" + ); setReady(masterElected && fileSettingsApplied); } } + private boolean getReadinessState( + ClusterState clusterState, + ClusterState previousState, + Function accessor, + String description + ) { + boolean newStateValue = accessor.apply(clusterState); + boolean oldStateValue = accessor.apply(previousState); + if (oldStateValue != newStateValue) { + logger.info("readiness change: {}={}", description, newStateValue); + } + return newStateValue; + } + + private boolean isMasterElected(ClusterState clusterState) { + return clusterState.nodes().getMasterNodeId() != null; + } + // protected to allow mock service to override protected boolean areFileSettingsApplied(ClusterState clusterState) { ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 3522cfb7ef0d7..281f34fe1ef6d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static 
org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestUtils.getAckTimeout; @@ -46,6 +47,15 @@ public String getName() { return "rollover_index_action"; } + @Override + public Set supportedCapabilities() { + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + return Set.of("lazy-rollover-failure-store"); + } else { + return Set.of(); + } + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean includeTypeName = includeTypeName(request); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java index 67afddcb70664..f6ac8610b254d 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -127,7 +127,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources repositoryHealthAnalyzer.getSymptom(), repositoryHealthAnalyzer.getDetails(verbose), repositoryHealthAnalyzer.getImpacts(), - repositoryHealthAnalyzer.getDiagnoses(maxAffectedResourcesCount) + repositoryHealthAnalyzer.getDiagnoses(verbose, maxAffectedResourcesCount) ); } @@ -243,7 +243,10 @@ public List getImpacts() { return IMPACTS; } - public List getDiagnoses(int maxAffectedResourcesCount) { + public List getDiagnoses(boolean verbose, int maxAffectedResourcesCount) { + if (verbose == false) { + return List.of(); + } var diagnoses = new ArrayList(); if (corruptedRepositories.isEmpty() == false) { diagnoses.add( @@ -253,10 +256,10 @@ public List getDiagnoses(int maxAffectedResourcesCount) { ) ); } - if (unknownRepositories.size() > 0) { + if (unknownRepositories.isEmpty() == false) { diagnoses.add(createDiagnosis(UNKNOWN_DEFINITION, unknownRepositories, 
nodesWithUnknownRepos, maxAffectedResourcesCount)); } - if (invalidRepositories.size() > 0) { + if (invalidRepositories.isEmpty() == false) { diagnoses.add(createDiagnosis(INVALID_DEFINITION, invalidRepositories, nodesWithInvalidRepos, maxAffectedResourcesCount)); } return diagnoses; diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index ceda140827527..a887a2be558e0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -1118,9 +1118,10 @@ public static boolean assertNotScheduleThread(String reason) { public static boolean assertCurrentThreadPool(String... permittedThreadPoolNames) { final var threadName = Thread.currentThread().getName(); + final var executorName = EsExecutors.executorName(threadName); assert threadName.startsWith("TEST-") || threadName.startsWith("LuceneTestCase") - || Arrays.stream(permittedThreadPoolNames).anyMatch(n -> threadName.contains('[' + n + ']')) + || Arrays.asList(permittedThreadPoolNames).contains(executorName) : threadName + " not in " + Arrays.toString(permittedThreadPoolNames) + " nor a test thread"; return true; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index b6c0b5047ab77..ea8662be9a0a5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -274,20 +274,6 @@ public void testValidation() { validationException.validationErrors().get(0) ); } - - { - RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); - rolloverRequest.setIndicesOptions( - 
IndicesOptions.builder(rolloverRequest.indicesOptions()) - .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) - .build() - ); - rolloverRequest.lazy(true); - ActionRequestValidationException validationException = rolloverRequest.validate(); - assertNotNull(validationException); - assertEquals(1, validationException.validationErrors().size()); - assertEquals("lazily rolling over a failure store is currently not supported", validationException.validationErrors().get(0)); - } } public void testParsingWithType() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 9dbabe2c41893..2dfc6ca24f4ac 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -116,7 +116,6 @@ public class TransportRolloverActionTests extends ESTestCase { mockClusterService, telemetryPlugin.getTelemetryProvider(Settings.EMPTY) ); - final DataStreamAutoShardingService dataStreamAutoShardingService = new DataStreamAutoShardingService( Settings.EMPTY, mockClusterService, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 742e0207b2cd4..76bf8dc79b855 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -14,6 +14,9 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; +import 
org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -85,6 +88,7 @@ public class BulkOperationTests extends ESTestCase { private final String indexName = "my_index"; private final String dataStreamName = "my_data_stream"; private final String fsDataStreamName = "my_failure_store_data_stream"; + private final String fsRolloverDataStreamName = "my_failure_store_to_be_rolled_over_data_stream"; private final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) .settings( @@ -107,6 +111,15 @@ public class BulkOperationTests extends ESTestCase { private final IndexMetadata ds2FailureStore1 = DataStreamTestHelper.createFailureStore(fsDataStreamName, 1, millis) .numberOfShards(1) .build(); + private final IndexMetadata ds3BackingIndex1 = DataStreamTestHelper.createBackingIndex(fsRolloverDataStreamName, 1, millis) + .numberOfShards(2) + .build(); + private final IndexMetadata ds3FailureStore1 = DataStreamTestHelper.createFailureStore(fsRolloverDataStreamName, 1, millis) + .numberOfShards(1) + .build(); + private final IndexMetadata ds3FailureStore2 = DataStreamTestHelper.createFailureStore(fsRolloverDataStreamName, 2, millis) + .numberOfShards(1) + .build(); private final DataStream dataStream1 = DataStreamTestHelper.newInstance( dataStreamName, @@ -117,6 +130,13 @@ public class BulkOperationTests extends ESTestCase { List.of(ds2BackingIndex1.getIndex()), List.of(ds2FailureStore1.getIndex()) ); + private final DataStream dataStream3 = DataStream.builder(fsRolloverDataStreamName, List.of(ds3BackingIndex1.getIndex())) + .setGeneration(1) + .setFailureStoreEnabled(true) + .setFailureIndices( + DataStream.DataStreamIndices.failureIndicesBuilder(List.of(ds3FailureStore1.getIndex())).setRolloverOnWrite(true).build() + ) + 
.build(); private final ClusterState DEFAULT_STATE = ClusterState.builder(ClusterName.DEFAULT) .metadata( @@ -131,7 +151,7 @@ public class BulkOperationTests extends ESTestCase { .build(), "ds-template-with-failure-store", ComposableIndexTemplate.builder() - .indexPatterns(List.of(fsDataStreamName)) + .indexPatterns(List.of(fsDataStreamName, fsRolloverDataStreamName)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) .template(new Template(null, null, null, null)) .build() @@ -148,10 +168,17 @@ public class BulkOperationTests extends ESTestCase { ds2BackingIndex1.getIndex().getName(), ds2BackingIndex1, ds2FailureStore1.getIndex().getName(), - ds2FailureStore1 + ds2FailureStore1, + ds3BackingIndex1.getIndex().getName(), + ds3BackingIndex1, + ds3FailureStore1.getIndex().getName(), + ds3FailureStore1 ) ) - .dataStreams(Map.of(dataStreamName, dataStream1, fsDataStreamName, dataStream2), Map.of()) + .dataStreams( + Map.of(dataStreamName, dataStream1, fsDataStreamName, dataStream2, fsRolloverDataStreamName, dataStream3), + Map.of() + ) .build() ) .build(); @@ -759,6 +786,117 @@ public void testNodeClosureRejectsFailureStoreDocument() { verify(observer, times(1)).waitForNextChange(any()); } + /** + * When a bulk operation needs to redirect some documents that failed on the shard level, and that failure store is marked for lazy + * rollover, it first needs to roll over the failure store and then redirect the failure to the new failure index. 
+ */ + public void testLazilyRollingOverFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreFeatureFlagEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add( + new IndexRequest(fsRolloverDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE) + ); + bulkRequest.add( + new IndexRequest(fsRolloverDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE) + ); + + NodeClient client = getNodeClient( + shardSpecificResponse( + Map.of(new ShardId(ds3BackingIndex1.getIndex(), 0), failWithException(() -> new MapperException("test"))) + ), + (rolloverRequest, actionListener) -> actionListener.onResponse( + new RolloverResponse( + ds3FailureStore1.getIndex().getName(), + ds3FailureStore2.getIndex().getName(), + Map.of(), + false, + true, + true, + true, + false + ) + ) + ); + + DataStream rolledOverDataStream = dataStream3.copy() + .setFailureIndices( + dataStream3.getFailureIndices().copy().setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())).build() + ) + .build(); + Metadata metadata = Metadata.builder(DEFAULT_STATE.metadata()) + .indices(Map.of(ds3FailureStore2.getIndex().getName(), ds3FailureStore2)) + .put(rolledOverDataStream) + .build(); + ClusterState rolledOverState = ClusterState.builder(DEFAULT_STATE).metadata(metadata).build(); + ClusterStateObserver observer = mockObserver(DEFAULT_STATE, DEFAULT_STATE, rolledOverState); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(item -> item.getIndex().equals(ds3FailureStore2.getIndex().getName())) + .findFirst() + 
.orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem, is(notNullValue())); + } + + /** + * When a bulk operation faces a failure while trying to roll over a failure store that was marked for lazy rollover, the exception + * should be added to the list of suppressed causes in the BulkItemResponse. + */ + public void testFailureWhileRollingOverFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreFeatureFlagEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add( + new IndexRequest(fsRolloverDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE) + ); + bulkRequest.add( + new IndexRequest(fsRolloverDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE) + ); + + NodeClient client = getNodeClient( + shardSpecificResponse( + Map.of(new ShardId(ds3BackingIndex1.getIndex(), 0), failWithException(() -> new MapperException("test"))) + ), + ((rolloverRequest, actionListener) -> actionListener.onFailure(new Exception("rollover failed"))) + ); + + DataStream rolledOverDataStream = dataStream3.copy() + .setFailureIndices( + dataStream3.getFailureIndices().copy().setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())).build() + ) + .build(); + Metadata metadata = Metadata.builder(DEFAULT_STATE.metadata()) + .indices(Map.of(ds3FailureStore2.getIndex().getName(), ds3FailureStore2)) + .put(rolledOverDataStream) + .build(); + ClusterState rolledOverState = ClusterState.builder(DEFAULT_STATE).metadata(metadata).build(); + ClusterStateObserver observer = mockObserver(DEFAULT_STATE, DEFAULT_STATE, rolledOverState); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + 
BulkResponse bulkItemResponses = future.get(); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("test"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(Exception.class))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), is(equalTo("rollover failed"))); + } + /** * Throws an assertion error with the given message if the client operation executes */ @@ -878,6 +1016,18 @@ private static BulkItemResponse requestToFailedResponse(BulkItemRequest itemRequ * @return A node client for the test. */ private NodeClient getNodeClient(BiConsumer> onShardAction) { + return getNodeClient(onShardAction, null); + } + + /** + * Create a client that redirects expected actions to the provided function and fails if an unexpected operation happens. + * @param onShardAction Called when TransportShardBulkAction is executed. + * @return A node client for the test. 
+ */ + private NodeClient getNodeClient( + BiConsumer> onShardAction, + BiConsumer> onRolloverAction + ) { return new NoOpNodeClient(threadPool) { @Override @SuppressWarnings("unchecked") @@ -900,6 +1050,27 @@ public Task exe } return null; } + + @Override + @SuppressWarnings("unchecked") + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + if (LazyRolloverAction.INSTANCE.equals(action)) { + ActionListener notifyOnceListener = ActionListener.notifyOnce( + (ActionListener) listener + ); + try { + onRolloverAction.accept((RolloverRequest) request, notifyOnceListener); + } catch (Exception responseException) { + notifyOnceListener.onFailure(responseException); + } + } else { + fail("Unexpected client call to " + action.name()); + } + } }; } @@ -1000,9 +1171,9 @@ private BulkOperation newBulkOperation( /** * A default mock cluster state observer that simply returns the state */ - private ClusterStateObserver mockObserver(ClusterState state) { + private ClusterStateObserver mockObserver(ClusterState state, ClusterState... 
states) { ClusterStateObserver mockObserver = mock(ClusterStateObserver.class); - when(mockObserver.setAndGetObservedState()).thenReturn(state); + when(mockObserver.setAndGetObservedState()).thenReturn(state, states); when(mockObserver.isTimedOut()).thenReturn(false); return mockObserver; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index fc9e9f05542c9..47a6a03078b9a 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.EmptySystemIndices; @@ -195,7 +194,7 @@ public void onFailure(Exception e) { }; Map indicesToAutoCreate = Map.of(); // unused Set dataStreamsToRollover = Set.of(); // unused - Map indicesThatCannotBeCreated = Map.of(); // unused + Set failureStoresToRollover = Set.of(); // unused long startTime = 0; bulkAction.createMissingIndicesAndIndexData( task, @@ -204,7 +203,7 @@ public void onFailure(Exception e) { listener, indicesToAutoCreate, dataStreamsToRollover, - indicesThatCannotBeCreated, + failureStoresToRollover, startTime ); assertThat(onResponseCalled.get(), equalTo(true)); diff --git a/server/src/test/java/org/elasticsearch/action/support/MappedActionFiltersTests.java b/server/src/test/java/org/elasticsearch/action/support/MappedActionFiltersTests.java index 7df0c1c40a6cc..e4a3a7ca01b73 100644 --- a/server/src/test/java/org/elasticsearch/action/support/MappedActionFiltersTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/support/MappedActionFiltersTests.java @@ -59,11 +59,6 @@ public String actionName() { return "dummyAction"; } - @Override - public int order() { - return 0; - } - @Override public void apply( Task task, @@ -100,11 +95,6 @@ public String actionName() { return "dummyAction"; } - @Override - public int order() { - return 0; - } - @Override public void apply( Task task, @@ -123,11 +113,6 @@ public String actionName() { return "dummyAction"; } - @Override - public int order() { - return 0; - } - @Override public void apply( Task task, @@ -164,11 +149,6 @@ public String actionName() { return "dummyAction"; } - @Override - public int order() { - return 0; - } - @Override public void apply( Task task, diff --git a/server/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java b/server/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java deleted file mode 100644 index 1697c1400b24a..0000000000000 --- a/server/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.bootstrap; - -import org.apache.lucene.util.Constants; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; - -public class JNANativesTests extends ESTestCase { - - public void testConsoleCtrlHandler() { - if (Constants.WINDOWS) { - assertNotNull(JNAKernel32Library.getInstance()); - assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(1)); - } else { - assertNotNull(JNAKernel32Library.getInstance()); - assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(0)); - } - } -} diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index e1b59a77fef97..42cd339fbebdb 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -627,12 +627,15 @@ public void testFixedUnboundedRejectOnShutdown() { ); } - public void testParseExecutorName() { - String executorName = randomAlphaOfLength(10); - ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(rarely() ? null : randomAlphaOfLength(10), executorName); - Thread thread = threadFactory.newThread(() -> {}); - - assertThat(EsExecutors.executorName(thread), equalTo(executorName)); + public void testParseExecutorName() throws InterruptedException { + final var executorName = randomAlphaOfLength(10); + final var threadFactory = EsExecutors.daemonThreadFactory(rarely() ? 
null : randomAlphaOfLength(10), executorName); + final var thread = threadFactory.newThread(() -> {}); + try { + assertThat(EsExecutors.executorName(thread.getName()), equalTo(executorName)); + } finally { + thread.join(); + } } private static void runRejectOnShutdownTest(ExecutorService executor) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java index 5b50eb63e1489..a3e11c0645e32 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.common.xcontent.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -420,4 +421,25 @@ public void testParseToType() throws IOException { assertThat(names, equalTo(Set.of("a", "c"))); } + + public void testDrainAndClose() throws IOException { + String json = """ + { "a": "b", "c": "d", "e": {"f": "g"}, "h": ["i", "j", {"k": "l"}]}"""; + var parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json); + var content = XContentBuilder.builder(XContentType.JSON.xContent()); + XContentHelper.drainAndClose(parser, content); + + assertEquals(json.replace(" ", ""), Strings.toString(content)); + assertTrue(parser.isClosed()); + } + + public void testDrainAndCloseAlreadyClosed() throws IOException { + var parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, "{}"); + parser.close(); + + assertThrows( + IllegalStateException.class, + () -> XContentHelper.drainAndClose(parser, XContentBuilder.builder(XContentType.JSON.xContent())) + ); + } } diff --git 
a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 0d38aaf5b3e4a..00582e2bc7942 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.HealthFeatures; +import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.HealthStatus; @@ -962,7 +963,20 @@ public void testLimitNumberOfAffectedResources() { assertThat(nonDataNonMasterAffectedResources.get(0).getNodes().size(), is(10)); } } + } + public void testSkippingFieldsWhenVerboseIsFalse() { + Set discoveryNodes = createNodesWithAllRoles(); + ClusterService clusterService = createClusterService(discoveryNodes, false); + DiskHealthIndicatorService diskHealthIndicatorService = new DiskHealthIndicatorService(clusterService, featureService); + HealthStatus expectedStatus = HealthStatus.RED; + HealthInfo healthInfo = createHealthInfoWithOneUnhealthyNode(expectedStatus, discoveryNodes); + HealthIndicatorResult result = diskHealthIndicatorService.calculate(false, healthInfo); + assertThat(result.status(), equalTo(expectedStatus)); + assertThat(result.details(), equalTo(HealthIndicatorDetails.EMPTY)); + assertThat(result.diagnosisList(), equalTo(List.of())); + assertThat(result.impacts().isEmpty(), equalTo(false)); + assertThat(result.symptom().isEmpty(), equalTo(false)); } // We expose the indicator name and the diagnoses in the x-pack usage API. 
In order to index them properly in a telemetry index diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java index c57f19999a915..1c3d0d486b282 100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.HealthFeatures; +import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.index.IndexVersion; @@ -377,6 +378,21 @@ public void testMappedFieldsForTelemetry() { ); } + public void testSkippingFieldsWhenVerboseIsFalse() { + int maxShardsPerNodeFrozen = randomValidMaxShards(); + var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(11)); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( + false, + HealthInfo.EMPTY_HEALTH_INFO + ); + + assertEquals(indicatorResult.status(), RED); + assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum number of shards for data nodes."); + assertThat(indicatorResult.impacts(), equalTo(RED_INDICATOR_IMPACTS)); + assertThat(indicatorResult.diagnosisList(), hasSize(0)); + assertThat(indicatorResult.details(), is(HealthIndicatorDetails.EMPTY)); + } + private static int randomValidMaxShards() { return randomIntBetween(50, 1000); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesCodecDuelTests.java 
index e8bbe26a2fbf1..ac1232b6246ba 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesCodecDuelTests.java @@ -138,12 +138,15 @@ private void assertSortedDocValues(LeafReader baselineReader, LeafReader contend var baseline = baselineReader.getSortedDocValues(FIELD_1); var contender = contenderReader.getSortedDocValues(FIELD_1); assertEquals(baseline.getValueCount(), contender.getValueCount()); - for (int docId : docIdsToAdvanceTo) { - int baselineTarget = baseline.advance(docId); - int contenderTarget = contender.advance(docId); - assertDocIds(baseline, baselineTarget, contender, contenderTarget); + for (int i = 0; i < docIdsToAdvanceTo.length; i++) { + int docId = docIdsToAdvanceTo[i]; + int baselineTarget = assertAdvance(docId, baselineReader, contenderReader, baseline, contender); assertEquals(baseline.ordValue(), contender.ordValue()); assertEquals(baseline.lookupOrd(baseline.ordValue()), contender.lookupOrd(contender.ordValue())); + i = shouldSkipDocIds(i, docId, baselineTarget, docIdsToAdvanceTo); + if (i == -1) { + break; + } } } // test advanceExact() @@ -226,17 +229,20 @@ private void assertSortedSetDocValues(LeafReader baselineReader, LeafReader cont var baseline = baselineReader.getSortedSetDocValues(FIELD_2); var contender = contenderReader.getSortedSetDocValues(FIELD_2); assertEquals(baseline.getValueCount(), contender.getValueCount()); - for (int docId : docIdsToAdvanceTo) { - int baselineTarget = baseline.advance(docId); - int contenderTarget = contender.advance(docId); - assertDocIds(baseline, baselineTarget, contender, contenderTarget); + for (int i = 0; i < docIdsToAdvanceTo.length; i++) { + int docId = docIdsToAdvanceTo[i]; + int baselineTarget = assertAdvance(docId, baselineReader, contenderReader, baseline, contender); assertEquals(baseline.docValueCount(), contender.docValueCount()); - for (int i = 0; i < 
baseline.docValueCount(); i++) { + for (int j = 0; j < baseline.docValueCount(); j++) { long baselineOrd = baseline.nextOrd(); long contenderOrd = contender.nextOrd(); assertEquals(baselineOrd, contenderOrd); assertEquals(baseline.lookupOrd(baselineOrd), contender.lookupOrd(contenderOrd)); } + i = shouldSkipDocIds(i, docId, baselineTarget, docIdsToAdvanceTo); + if (i == -1) { + break; + } } } // test advanceExact() @@ -319,16 +325,19 @@ private void assertSortedNumericDocValues(LeafReader baselineReader, LeafReader { var baseline = baselineReader.getSortedNumericDocValues(FIELD_3); var contender = contenderReader.getSortedNumericDocValues(FIELD_3); - for (int docId : docIdsToAdvanceTo) { - int baselineTarget = baseline.advance(docId); - int contenderTarget = contender.advance(docId); - assertDocIds(baseline, baselineTarget, contender, contenderTarget); + for (int i = 0; i < docIdsToAdvanceTo.length; i++) { + int docId = docIdsToAdvanceTo[i]; + int baselineTarget = assertAdvance(docId, baselineReader, contenderReader, baseline, contender); assertEquals(baseline.docValueCount(), contender.docValueCount()); - for (int i = 0; i < baseline.docValueCount(); i++) { + for (int j = 0; j < baseline.docValueCount(); j++) { long baselineValue = baseline.nextValue(); long contenderValue = contender.nextValue(); assertEquals(baselineValue, contenderValue); } + i = shouldSkipDocIds(i, docId, baselineTarget, docIdsToAdvanceTo); + if (i == -1) { + break; + } } } // test advanceExact() @@ -366,11 +375,14 @@ private void assertNumericDocValues(LeafReader baselineReader, LeafReader conten { var baseline = baselineReader.getNumericDocValues(FIELD_4); var contender = contenderReader.getNumericDocValues(FIELD_4); - for (int docId : docIdsToAdvanceTo) { - int baselineTarget = baseline.advance(docId); - int contenderTarget = contender.advance(docId); - assertDocIds(baseline, baselineTarget, contender, contenderTarget); + for (int i = 0; i < docIdsToAdvanceTo.length; i++) { + int docId = 
docIdsToAdvanceTo[i]; + int baselineTarget = assertAdvance(docId, baselineReader, contenderReader, baseline, contender); assertEquals(baseline.longValue(), contender.longValue()); + i = shouldSkipDocIds(i, docId, baselineTarget, docIdsToAdvanceTo); + if (i == -1) { + break; + } } } // test advanceExact() @@ -403,11 +415,14 @@ private void assertBinaryDocValues(LeafReader baselineReader, LeafReader contend { var baseline = baselineReader.getBinaryDocValues(FIELD_5); var contender = contenderReader.getBinaryDocValues(FIELD_5); - for (int docId : docIdsToAdvanceTo) { - int baselineTarget = baseline.advance(docId); - int contenderTarget = contender.advance(docId); - assertDocIds(baseline, baselineTarget, contender, contenderTarget); + for (int i = 0; i < docIdsToAdvanceTo.length; i++) { + int docId = docIdsToAdvanceTo[i]; + int baselineTarget = assertAdvance(docId, baselineReader, contenderReader, baseline, contender); assertEquals(baseline.binaryValue(), contender.binaryValue()); + i = shouldSkipDocIds(i, docId, baselineTarget, docIdsToAdvanceTo); + if (i == -1) { + break; + } } } // test advanceExact() @@ -424,6 +439,39 @@ private void assertBinaryDocValues(LeafReader baselineReader, LeafReader contend } } + private static int assertAdvance( + int docId, + LeafReader baselineReader, + LeafReader contenderReader, + DocIdSetIterator baseline, + DocIdSetIterator contender + ) throws IOException { + assert docId < baselineReader.maxDoc() : "exhausted DocIdSetIterator yields undefined behaviour"; + assert docId > baseline.docID() + : "target must be greater then the current docId in DocIdSetIterator, otherwise this can yield undefined behaviour"; + int baselineTarget = baseline.advance(docId); + assert docId < contenderReader.maxDoc() : "exhausted DocIdSetIterator yields undefined behaviour"; + assert docId > contender.docID() + : "target must be greater then the current docId in DocIdSetIterator, otherwise this can yield undefined behaviour"; + int contenderTarget = 
contender.advance(docId); + assertDocIds(baseline, baselineTarget, contender, contenderTarget); + return baselineTarget; + } + + private static int shouldSkipDocIds(int i, int docId, int baselineTarget, Integer[] docIdsToAdvanceTo) { + if (i < (docIdsToAdvanceTo.length - 1) && baselineTarget > docId) { + for (int j = i + 1; j < docIdsToAdvanceTo.length; j++) { + int nextDocId = docIdsToAdvanceTo[j]; + if (nextDocId > baselineTarget) { + return j - 1; // -1 because the loop from which this method is invoked executes: i++ + } + } + return -1; + } else { + return i; + } + } + private static void assertDocIds(DocIdSetIterator baseline, int baselineDocId, DocIdSetIterator contender, int contenderDocId) { assertEquals(baselineDocId, contenderDocId); assertEquals(baselineDocId, baseline.docID()); diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java index 572375d64d8b8..7744eed90e1cc 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -304,6 +304,29 @@ public void testLimitNumberOfAffectedResources() { ); } + public void testSkippingFieldsWhenVerboseIsFalse() { + var repos = appendToCopy( + randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)), + createRepositoryMetadata("corrupted-repo", true) + ); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + + assertThat( + service.calculate(false, healthInfo), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Detected [1] corrupted snapshot repository.", + HealthIndicatorDetails.EMPTY, + RepositoryIntegrityHealthIndicatorService.IMPACTS, + List.of() + ) + ) + ); + } 
+ private List createDiagnoses( List repos, DiscoveryNodes nodes, diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index e693f9a1562fd..cb9a227e73fcf 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -61,8 +61,6 @@ public class HeapAttackIT extends ESRestTestCase { static volatile boolean SUITE_ABORTED = false; - private static String ESQL_VERSION = "2024.04.01"; - @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -156,7 +154,7 @@ private Response groupOnManyLongs(int count) throws IOException { } private StringBuilder makeManyLongs(int count) { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM manylongs\\n| EVAL i0 = a + b, i1 = b + i0"); for (int i = 2; i < count; i++) { query.append(", i").append(i).append(" = i").append(i - 2).append(" + ").append(i - 1); @@ -187,7 +185,7 @@ public void testHugeConcat() throws IOException { } private Response concat(int evals) throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM single | EVAL str = TO_STRING(a)"); for (int e = 0; e < evals; e++) { query.append("\n| EVAL str=CONCAT(") @@ -224,7 +222,7 @@ public void testHugeManyConcat() throws IOException { * Tests that generate many moderately long strings. 
*/ private Response manyConcat(int strings) throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM manylongs | EVAL str = CONCAT("); query.append( Arrays.stream(new String[] { "a", "b", "c", "d", "e" }) @@ -276,7 +274,7 @@ public void testTooManyEval() throws IOException { } private Response manyEval(int evalLines) throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM manylongs"); for (int e = 0; e < evalLines; e++) { query.append("\n| EVAL "); @@ -358,7 +356,7 @@ public void testFetchTooManyBigFields() throws IOException { * Fetches documents containing 1000 fields which are {@code 1kb} each. */ private void fetchManyBigFields(int docs) throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}"); Response response = query(query.toString(), "columns"); Map map = responseAsMap(response); @@ -387,7 +385,7 @@ public void testAggTooManyMvLongs() throws IOException { } private Response aggMvLongs(int fields) throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM mv_longs | STATS MAX(f00) BY f00"); for (int f = 1; f < fields; f++) { query.append(", f").append(String.format(Locale.ROOT, "%02d", f)); @@ -413,7 +411,7 @@ public void testFetchTooManyMvLongs() throws IOException { } private Response fetchMvLongs() throws IOException { - StringBuilder query = startQueryWithVersion(ESQL_VERSION); + StringBuilder query = startQuery(); query.append("FROM mv_longs\"}"); return query(query.toString(), "columns"); } @@ -584,11 +582,9 @@ public void assertRequestBreakerEmpty() throws Exception { }); } - private static StringBuilder startQueryWithVersion(String version) { + private static StringBuilder 
startQuery() { StringBuilder query = new StringBuilder(); - query.append("{\"version\":\"" + version + "\","); - query.append("\"query\":\""); - + query.append("{\"query\":\""); return query; } } diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 3aed133c590f7..f23048185876b 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -221,7 +221,7 @@ static Map getCodebases() { addClassCodebase(codebases, "elasticsearch-core", "org.elasticsearch.core.Booleans"); addClassCodebase(codebases, "elasticsearch-cli", "org.elasticsearch.cli.Command"); addClassCodebase(codebases, "elasticsearch-preallocate", "org.elasticsearch.preallocate.Preallocate"); - addClassCodebase(codebases, "elasticsearch-vec", "org.elasticsearch.vec.VectorScorer"); + addClassCodebase(codebases, "elasticsearch-vec", "org.elasticsearch.vec.VectorScorerFactory"); addClassCodebase(codebases, "framework", "org.elasticsearch.test.ESTestCase"); return codebases; } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index f9af0d27f3e6f..b2c2afd9df429 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1092,6 +1092,14 @@ private Object expectedParsedForBlockLoader() throws IOException { public record SyntheticSourceInvalidExample(Matcher error, CheckedConsumer mapping) {} public interface SyntheticSourceSupport { + /** + * @return True if synthetic source support is implemented to exactly store the source + * without modifications. 
+ */ + default boolean preservesExactSource() { + return false; + } + /** * Examples that should work when source is generated from doc values. */ @@ -1115,7 +1123,7 @@ public final void testSyntheticSource() throws IOException { assertSyntheticSource(syntheticSourceSupport(ignoreMalformed).example(5)); } - public final void testSyntheticSourceIgnoreMalformedExamples() throws IOException { + public void testSyntheticSourceIgnoreMalformedExamples() throws IOException { assumeTrue("type doesn't support ignore_malformed", supportsIgnoreMalformed()); CheckedConsumer mapping = syntheticSourceSupport(true).example(1).mapping(); for (ExampleMalformedValue v : exampleMalformedValues()) { @@ -1177,7 +1185,7 @@ public final void testSyntheticSourceMany() throws IOException { ) { for (int i = 0; i < count; i++) { if (rarely() && supportsEmptyInputArray()) { - expected[i] = "{}"; + expected[i] = support.preservesExactSource() ? "{\"field\":[]}" : "{}"; iw.addDocument(mapper.parse(source(b -> b.startArray("field").endArray())).rootDoc()); continue; } @@ -1236,13 +1244,16 @@ public final void testSyntheticSourceInObject() throws IOException { public final void testSyntheticEmptyList() throws IOException { assumeTrue("Field does not support [] as input", supportsEmptyInputArray()); boolean ignoreMalformed = supportsIgnoreMalformed() ? rarely() : false; - SyntheticSourceExample syntheticSourceExample = syntheticSourceSupport(ignoreMalformed).example(5); + SyntheticSourceSupport support = syntheticSourceSupport(ignoreMalformed); + SyntheticSourceExample syntheticSourceExample = support.example(5); DocumentMapper mapper = createDocumentMapper(syntheticSourceMapping(b -> { b.startObject("field"); syntheticSourceExample.mapping().accept(b); b.endObject(); })); - assertThat(syntheticSource(mapper, b -> b.startArray("field").endArray()), equalTo("{}")); + + var expected = support.preservesExactSource() ? 
"{\"field\":[]}" : "{}"; + assertThat(syntheticSource(mapper, b -> b.startArray("field").endArray()), equalTo(expected)); } public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { diff --git a/x-pack/plugin/apm-data/README.md b/x-pack/plugin/apm-data/README.md index 10892d767b536..9334ba97df9a9 100644 --- a/x-pack/plugin/apm-data/README.md +++ b/x-pack/plugin/apm-data/README.md @@ -11,6 +11,18 @@ See [x-pack/plugin/core/src/main/resources](../core/src/main/resources). This plugin is intended to work with data produced by https://github.com/elastic/apm-data. + +## Adding/Removing/Updating a resource + +All resources are defined as YAML under [src/main/resources](src/main/resources). + +For a resource to be known to the plugin it must be added to +[src/main/resources/resources.yaml](src/main/resources/resources.yaml) in the +appropriate section. + +Any update to resources included by this package also requires a bump to the +`version` property included in the resources file. 
+ ## Testing ## Unit testing diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm@settings.yaml new file mode 100644 index 0000000000000..323f2340fb322 --- /dev/null +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm@settings.yaml @@ -0,0 +1,8 @@ +--- +version: ${xpack.apmdata.template.version} +_meta: + description: Default settings for logs-apm.* data streams + managed: true +template: + settings: + codec: best_compression diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 95f60ac6b5dec..21cad50f3fe90 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -11,6 +11,7 @@ composed_of: - apm@mappings - apm@settings - apm-10d@lifecycle +- logs-apm@settings - logs@custom - logs-apm.app@custom - ecs@mappings diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 6401f4db5018d..2cfa7b454722f 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -12,6 +12,7 @@ composed_of: - apm@mappings - apm@settings - apm-10d@lifecycle +- logs-apm@settings - logs-apm.error@mappings - logs@custom - logs-apm.error@custom diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 6f36c66252d69..efa6ae694c464 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ 
b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 5 +version: 6 component-templates: # Data lifecycle. @@ -14,6 +14,7 @@ component-templates: # - metrics-apm* data streams additionally compose metrics-apm@* - apm@mappings - apm@settings + - logs-apm@settings - metrics-apm@mappings - metrics-apm@settings # Data stream-specific mappings. diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 0cf3832179c2d..fb35b34fd4dfd 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -1,5 +1,6 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.Version import java.nio.file.Paths @@ -172,6 +173,7 @@ testClusters.configureEach { setting 'indices.lifecycle.history_index_enabled', 'false' keystore 'bootstrap.password', 'x-pack-test-password' user username: "x_pack_rest_user", password: "x-pack-test-password" + requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.15.0") } if (BuildParams.inFipsJvm) { diff --git a/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java b/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java index 158bcce7c9555..083850e80dd47 100644 --- a/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java +++ b/x-pack/plugin/core/src/javaRestTest/java/org/elasticsearch/xpack/core/DataStreamRestIT.java @@ -20,8 +20,8 @@ import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; import static 
org.hamcrest.Matchers.notNullValue; public class DataStreamRestIT extends ESRestTestCase { @@ -42,19 +42,24 @@ public void testDSXpackInfo() { assertTrue((boolean) dataStreams.get("enabled")); } + @SuppressWarnings("unchecked") public void testDSXpackUsage() throws Exception { Map dataStreams = (Map) getLocation("/_xpack/usage").get("data_streams"); assertNotNull(dataStreams); assertTrue((boolean) dataStreams.get("available")); assertTrue((boolean) dataStreams.get("enabled")); - assertThat(dataStreams.get("data_streams"), anyOf(equalTo(null), equalTo(0))); - + assertThat(dataStreams.get("data_streams"), equalTo(0)); + assertThat(dataStreams, hasKey("failure_store")); + Map failureStoreStats = (Map) dataStreams.get("failure_store"); + assertThat(failureStoreStats.get("enabled_count"), equalTo(0)); + assertThat(failureStoreStats.get("failure_indices_count"), equalTo(0)); assertBusy(() -> { Map logsTemplate = (Map) ((List) getLocation("/_index_template/logs").get("index_templates")).get(0); assertThat(logsTemplate, notNullValue()); assertThat(logsTemplate.get("name"), equalTo("logs")); assertThat(((Map) logsTemplate.get("index_template")).get("data_stream"), notNullValue()); }); + putFailureStoreTemplate(); // Create a data stream Request indexRequest = new Request("POST", "/logs-mysql-default/_doc"); @@ -65,21 +70,29 @@ public void testDSXpackUsage() throws Exception { Request rollover = new Request("POST", "/logs-mysql-default/_rollover"); client().performRequest(rollover); + // Create failure store data stream + indexRequest = new Request("POST", "/fs/_doc"); + indexRequest.setJsonEntity("{\"@timestamp\": \"2020-01-01\"}"); + client().performRequest(indexRequest); + dataStreams = (Map) getLocation("/_xpack/usage").get("data_streams"); assertNotNull(dataStreams); assertTrue((boolean) dataStreams.get("available")); assertTrue((boolean) dataStreams.get("enabled")); - assertThat("got: " + dataStreams, dataStreams.get("data_streams"), equalTo(1)); - assertThat("got: " + 
dataStreams, dataStreams.get("indices_count"), equalTo(2)); + assertThat("got: " + dataStreams, dataStreams.get("data_streams"), equalTo(2)); + assertThat("got: " + dataStreams, dataStreams.get("indices_count"), equalTo(3)); + failureStoreStats = (Map) dataStreams.get("failure_store"); + assertThat(failureStoreStats.get("enabled_count"), equalTo(1)); + assertThat(failureStoreStats.get("failure_indices_count"), equalTo(1)); } Map getLocation(String path) { try { - Response executeRepsonse = client().performRequest(new Request("GET", path)); + Response executeResponse = client().performRequest(new Request("GET", path)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, - EntityUtils.toByteArray(executeRepsonse.getEntity()) + EntityUtils.toByteArray(executeResponse.getEntity()) ) ) { return parser.map(); @@ -89,4 +102,15 @@ Map getLocation(String path) { throw new RuntimeException(e); } } + + private void putFailureStoreTemplate() { + try { + Request request = new Request("PUT", "/_index_template/fs-template"); + request.setJsonEntity("{\"index_patterns\": [\"fs*\"], \"data_stream\": {\"failure_store\": true}}"); + assertAcknowledged(client().performRequest(request)); + } catch (Exception e) { + fail("failed to insert index template with failure store enabled - got: " + e); + throw new RuntimeException(e); + } + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index ecc5a8b7eec30..9f6d502435d3a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -89,6 +89,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; 
import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; import org.elasticsearch.xpack.core.slm.SLMFeatureSetUsage; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; import org.elasticsearch.xpack.core.spatial.SpatialFeatureSetUsage; @@ -298,7 +299,12 @@ public List getNamedWriteables() { XPackField.ENTERPRISE_SEARCH, EnterpriseSearchFeatureSetUsage::new ), - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.UNIVERSAL_PROFILING, ProfilingUsage::new) + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.UNIVERSAL_PROFILING, ProfilingUsage::new), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + SecurityMigrationTaskParams.TASK_NAME, + SecurityMigrationTaskParams::new + ) ).filter(Objects::nonNull).toList(); } @@ -369,6 +375,11 @@ public List getNamedXContent() { Metadata.Custom.class, new ParseField(TransformMetadata.TYPE), parser -> TransformMetadata.LENIENT_PARSER.parse(parser, null).build() + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new ParseField(SecurityMigrationTaskParams.TASK_NAME), + SecurityMigrationTaskParams::fromXContent ) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java index fd62289c51c93..ba76788d695d4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java @@ -50,9 +50,25 @@ protected void masterOperation( ActionListener listener ) { final Map dataStreams = state.metadata().dataStreams(); + long backingIndicesCounter = 0; + long failureStoreEnabledCounter = 0; + long 
failureIndicesCounter = 0; + for (DataStream ds : dataStreams.values()) { + backingIndicesCounter += ds.getIndices().size(); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + if (ds.isFailureStoreEnabled()) { + failureStoreEnabledCounter++; + } + if (ds.getFailureIndices().getIndices().isEmpty() == false) { + failureIndicesCounter += ds.getFailureIndices().getIndices().size(); + } + } + } final DataStreamFeatureSetUsage.DataStreamStats stats = new DataStreamFeatureSetUsage.DataStreamStats( dataStreams.size(), - dataStreams.values().stream().map(ds -> ds.getIndices().size()).reduce(Integer::sum).orElse(0) + backingIndicesCounter, + failureStoreEnabledCounter, + failureIndicesCounter ); final DataStreamFeatureSetUsage usage = new DataStreamFeatureSetUsage(stats); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/api/filtering/ApiFilteringActionFilter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/api/filtering/ApiFilteringActionFilter.java index dee076631d407..509259ec147c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/api/filtering/ApiFilteringActionFilter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/api/filtering/ApiFilteringActionFilter.java @@ -31,11 +31,6 @@ protected ApiFilteringActionFilter(ThreadContext threadContext, String actionNam this.responseClass = responseClass; } - @Override - public int order() { - return 0; - } - @Override public final String actionName() { return actionName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java index d411512275fc1..63fcd3dc4e798 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamFeatureSetUsage.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -49,6 +50,12 @@ protected void innerXContent(XContentBuilder builder, Params params) throws IOEx super.innerXContent(builder, params); builder.field("data_streams", streamStats.totalDataStreamCount); builder.field("indices_count", streamStats.indicesBehindDataStream); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + builder.startObject("failure_store"); + builder.field("enabled_count", streamStats.failureStoreEnabledDataStreamCount); + builder.field("failure_indices_count", streamStats.failureStoreIndicesCount); + builder.endObject(); + } } @Override @@ -73,39 +80,30 @@ public boolean equals(Object obj) { return Objects.equals(streamStats, other.streamStats); } - public static class DataStreamStats implements Writeable { - - private final long totalDataStreamCount; - private final long indicesBehindDataStream; - - public DataStreamStats(long totalDataStreamCount, long indicesBehindDataStream) { - this.totalDataStreamCount = totalDataStreamCount; - this.indicesBehindDataStream = indicesBehindDataStream; - } + public record DataStreamStats( + long totalDataStreamCount, + long indicesBehindDataStream, + long failureStoreEnabledDataStreamCount, + long failureStoreIndicesCount + ) implements Writeable { public DataStreamStats(StreamInput in) throws IOException { - this.totalDataStreamCount = in.readVLong(); - this.indicesBehindDataStream = in.readVLong(); + this( + in.readVLong(), + in.readVLong(), + in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY) ? 
in.readVLong() : 0, + in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY) ? in.readVLong() : 0 + ); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(this.totalDataStreamCount); out.writeVLong(this.indicesBehindDataStream); - } - - @Override - public int hashCode() { - return Objects.hash(totalDataStreamCount, indicesBehindDataStream); - } - - @Override - public boolean equals(Object obj) { - if (obj.getClass() != getClass()) { - return false; + if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_TELEMETRY)) { + out.writeVLong(this.failureStoreEnabledDataStreamCount); + out.writeVLong(this.failureStoreIndicesCount); } - DataStreamStats other = (DataStreamStats) obj; - return totalDataStreamCount == other.totalDataStreamCount && indicesBehindDataStream == other.indicesBehindDataStream; } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java index dcd89c200db26..9faa78d3b34f9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequest.java @@ -21,9 +21,6 @@ protected EsqlQueryRequest(StreamInput in) throws IOException { super(in); } - // Use the unparsed version String, so we don't have to serialize a version object. 
- public abstract String esqlVersion(); - public abstract String query(); public abstract QueryBuilder filter(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index acd44165cad65..a0a2bbc3bed19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -35,8 +35,6 @@ public final ActionType action() { return action; } - public abstract EsqlQueryRequestBuilder esqlVersion(String esqlVersion); - public abstract EsqlQueryRequestBuilder query(String query); public abstract EsqlQueryRequestBuilder filter(QueryBuilder filter); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionAction.java new file mode 100644 index 0000000000000..f28014afde295 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionAction.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.SimpleBatchedExecutor; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.service.MasterServiceTaskQueue; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; + +/** + * Updates the migration version in the custom metadata for an index in cluster state + */ +public class UpdateIndexMigrationVersionAction extends ActionType { + + public static final UpdateIndexMigrationVersionAction INSTANCE = new UpdateIndexMigrationVersionAction(); + public static final String NAME = "internal:index/metadata/migration_version/update"; + public static final String MIGRATION_VERSION_CUSTOM_KEY = 
"migration_version"; + public static final String MIGRATION_VERSION_CUSTOM_DATA_KEY = "version"; + + public UpdateIndexMigrationVersionAction() { + super(NAME); + } + + public static class Request extends MasterNodeRequest { + private final int indexMigrationVersion; + private final String indexName; + + public Request(TimeValue timeout, int indexMigrationVersion, String indexName) { + super(timeout); + this.indexMigrationVersion = indexMigrationVersion; + this.indexName = indexName; + } + + protected Request(StreamInput in) throws IOException { + super(in); + this.indexMigrationVersion = in.readInt(); + this.indexName = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeInt(indexMigrationVersion); + out.writeString(indexName); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public int getIndexMigrationVersion() { + return indexMigrationVersion; + } + + public String getIndexName() { + return indexName; + } + } + + public static class TransportAction extends TransportMasterNodeAction { + private final MasterServiceTaskQueue updateIndexMigrationVersionTaskQueue; + + @Inject + public TransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + UpdateIndexMigrationVersionAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + Request::new, + indexNameExpressionResolver, + UpdateIndexMigrationVersionResponse::new, + threadPool.executor(ThreadPool.Names.MANAGEMENT) + ); + this.updateIndexMigrationVersionTaskQueue = clusterService.createTaskQueue( + "update-index-migration-version-task-queue", + Priority.LOW, + UPDATE_INDEX_MIGRATION_VERSION_TASK_EXECUTOR + ); + } + + private static final SimpleBatchedExecutor UPDATE_INDEX_MIGRATION_VERSION_TASK_EXECUTOR = + new 
SimpleBatchedExecutor<>() { + @Override + public Tuple executeTask(UpdateIndexMigrationVersionTask task, ClusterState clusterState) { + return Tuple.tuple(task.execute(clusterState), null); + } + + @Override + public void taskSucceeded(UpdateIndexMigrationVersionTask task, Void unused) { + task.listener.onResponse(null); + } + }; + + static class UpdateIndexMigrationVersionTask implements ClusterStateTaskListener { + private final ActionListener listener; + private final int indexMigrationVersion; + private final String indexName; + + UpdateIndexMigrationVersionTask(ActionListener listener, int indexMigrationVersion, String indexName) { + this.listener = listener; + this.indexMigrationVersion = indexMigrationVersion; + this.indexName = indexName; + } + + ClusterState execute(ClusterState currentState) { + IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(currentState.metadata().getIndices().get(indexName)); + indexMetadataBuilder.putCustom( + MIGRATION_VERSION_CUSTOM_KEY, + Map.of(MIGRATION_VERSION_CUSTOM_DATA_KEY, Integer.toString(indexMigrationVersion)) + ); + indexMetadataBuilder.version(indexMetadataBuilder.version() + 1); + + final ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder( + currentState.metadata().getIndices() + ); + builder.put(indexName, indexMetadataBuilder.build()); + + return ClusterState.builder(currentState) + .metadata(Metadata.builder(currentState.metadata()).indices(builder.build()).build()) + .build(); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + } + + @Override + protected void masterOperation( + Task task, + Request request, + ClusterState state, + ActionListener listener + ) throws Exception { + updateIndexMigrationVersionTaskQueue.submitTask( + "Updating cluster state with a new index migration version", + new UpdateIndexMigrationVersionTask( + ActionListener.wrap(response -> listener.onResponse(new UpdateIndexMigrationVersionResponse()), listener::onFailure), + 
request.getIndexMigrationVersion(), + request.getIndexName() + ), + null + ); + } + + @Override + protected ClusterBlockException checkBlock(Request request, ClusterState state) { + return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, new String[] { request.getIndexName() }); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionResponse.java new file mode 100644 index 0000000000000..e5377c3b2f4d1 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/UpdateIndexMigrationVersionResponse.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class UpdateIndexMigrationVersionResponse extends ActionResponse { + public UpdateIndexMigrationVersionResponse(StreamInput in) throws IOException { + super(in); + } + + public UpdateIndexMigrationVersionResponse() {} + + @Override + public void writeTo(StreamOutput out) throws IOException {} +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 1dc293f929121..baf72a3411cde 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -416,6 +416,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return toXContent(builder, params, false); } + public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { + return toXContent(builder, params, docCreation, false); + } + /** * Generates x-content for this {@link RoleDescriptor} instance. 
* @@ -424,10 +428,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws * @param docCreation {@code true} if the x-content is being generated for creating a document * in the security index, {@code false} if the x-content being generated * is for API display purposes + * @param includeMetadataFlattened {@code true} if the metadataFlattened field should be included in doc * @return x-content builder * @throws IOException if there was an error writing the x-content to the builder */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) + throws IOException { builder.startObject(); builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); if (configurableClusterPrivileges.length != 0) { @@ -440,6 +446,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea builder.array(Fields.RUN_AS.getPreferredName(), runAs); } builder.field(Fields.METADATA.getPreferredName(), metadata); + if (includeMetadataFlattened) { + builder.field(Fields.METADATA_FLATTENED.getPreferredName(), metadata); + } if (docCreation) { builder.field(Fields.TYPE.getPreferredName(), ROLE_TYPE); } else { @@ -584,6 +593,16 @@ public RoleDescriptor parse(String name, XContentParser parser) throws IOExcepti ); } metadata = parser.map(); + } else if (Fields.METADATA_FLATTENED.match(currentFieldName, parser.getDeprecationHandler())) { + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException( + "expected field [{}] to be of type object, but found [{}] instead", + currentFieldName, + token + ); + } + // consume object but just drop + parser.map(); } else if (Fields.TRANSIENT_METADATA.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.START_OBJECT) { // consume object but 
just drop @@ -1856,6 +1875,8 @@ public interface Fields { ParseField GRANT_FIELDS = new ParseField("grant"); ParseField EXCEPT_FIELDS = new ParseField("except"); ParseField METADATA = new ParseField("metadata"); + + ParseField METADATA_FLATTENED = new ParseField("metadata_flattened"); ParseField TRANSIENT_METADATA = new ParseField("transient_metadata"); ParseField TYPE = new ParseField("type"); ParseField RESTRICTION = new ParseField("restriction"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index eb4b7efdb88b0..41da995797e29 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -137,6 +137,9 @@ static RoleDescriptor kibanaSystem(String name) { RoleDescriptor.IndicesPrivileges.builder().indices("traces-apm.*").privileges("read", "read_cross_cluster").build(), RoleDescriptor.IndicesPrivileges.builder().indices("traces-apm-*").privileges("read", "read_cross_cluster").build(), + // Logstash telemetry queries of kibana task runner to access Logstash metric indices + RoleDescriptor.IndicesPrivileges.builder().indices("metrics-logstash.*").privileges("read").build(), + // Data telemetry reads mappings, metadata and stats of indices RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("view_index_metadata", "monitor").build(), // Endpoint diagnostic information. 
Kibana reads from these indices to send telemetry @@ -246,6 +249,7 @@ static RoleDescriptor kibanaSystem(String name) { ".logs-endpoint.heartbeat-*", ".logs-osquery_manager.actions-*", ".logs-osquery_manager.action.responses-*", + "logs-osquery_manager.action.responses-*", "profiling-*" ) .privileges( @@ -266,11 +270,16 @@ static RoleDescriptor kibanaSystem(String name) { .indices(".logs-endpoint.actions-*") .privileges("auto_configure", "read", "write") .build(), - // Osquery manager specific action responses. Kibana reads from these to display responses to the user. + // Legacy Osquery manager specific action responses. Kibana reads from these to display responses to the user. RoleDescriptor.IndicesPrivileges.builder() .indices(".logs-osquery_manager.action.responses-*") .privileges("auto_configure", "create_index", "read", "index", "delete") .build(), + // Osquery manager specific action responses. Kibana reads from these to display responses to the user. + RoleDescriptor.IndicesPrivileges.builder() + .indices("logs-osquery_manager.action.responses-*") + .privileges("read", "view_index_metadata") + .build(), // Osquery manager specific actions. Kibana reads and writes to this index to track new actions and display them. RoleDescriptor.IndicesPrivileges.builder() .indices(".logs-osquery_manager.actions-*") diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java new file mode 100644 index 0000000000000..d54f3098fead9 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.support; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class SecurityMigrationTaskParams implements PersistentTaskParams { + public static final String TASK_NAME = "security-migration"; + + private final int migrationVersion; + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + TASK_NAME, + true, + (arr) -> new SecurityMigrationTaskParams((int) arr[0]) + ); + + static { + PARSER.declareInt(constructorArg(), new ParseField("migration_version")); + } + + public SecurityMigrationTaskParams(int migrationVersion) { + this.migrationVersion = migrationVersion; + } + + public SecurityMigrationTaskParams(StreamInput in) throws IOException { + this.migrationVersion = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInt(migrationVersion); + } + + @Override + public String getWriteableName() { + return TASK_NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ADD_METADATA_FLATTENED_TO_ROLES; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + 
builder.field("migration_version", migrationVersion); + builder.endObject(); + return builder; + } + + public static SecurityMigrationTaskParams fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public int getMigrationVersion() { + return migrationVersion; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/searchablesnapshots/DataStreamFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/searchablesnapshots/DataStreamFeatureSetUsageTests.java index 76cf0ed99ebcb..3ff36c52229e7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/searchablesnapshots/DataStreamFeatureSetUsageTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/searchablesnapshots/DataStreamFeatureSetUsageTests.java @@ -16,7 +16,12 @@ public class DataStreamFeatureSetUsageTests extends AbstractWireSerializingTestC @Override protected DataStreamFeatureSetUsage createTestInstance() { return new DataStreamFeatureSetUsage( - new DataStreamFeatureSetUsage.DataStreamStats(randomNonNegativeLong(), randomNonNegativeLong()) + new DataStreamFeatureSetUsage.DataStreamStats( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong() + ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index e18b36fbdce7e..f0676f35ae316 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -995,6 +995,33 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); }); + 
Arrays.asList("logs-osquery_manager.action.responses-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); + // Tests for third-party agent indices that `kibana_system` has only 
`read` access Arrays.asList( "logs-sentinel_one." + randomAlphaOfLength(randomIntBetween(0, 13)), @@ -1498,6 +1525,34 @@ public void testKibanaSystemRole() { ); }); + // read-only index for Osquery actions responses + Arrays.asList("logs-osquery_manager.action.responses-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((cspIndex) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(cspIndex); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); + // read-only datastream for csp indices Arrays.asList("logs-cloud_security_posture.findings-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((cspIndex) -> { final IndexAbstraction indexAbstraction = mockIndexAbstraction(cspIndex); @@ -1686,6 +1741,33 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertViewIndexMetadata(kibanaRole, indexName); }); + + Arrays.asList("metrics-logstash." + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((indexName) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); } public void testKibanaAdminRole() { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json index 8e7bf77f649a7..7914cc9bd6a8a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json @@ -7,9 +7,6 @@ "template" : { "settings" : { "index" : { - "translog" : { - "durability" : "async" - }, "auto_expand_replicas" : "0-1", "query" : { "default_field" : "all_field_values" diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml index 235ac238a8563..cafdf0791de2f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/80_connector_update_last_sync_stats.yml @@ -48,6 +48,33 @@ setup: - match: { last_sync_error: "oh no error" } - match: { last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" } +--- +"Update Connector Last Sync Stats - Supports different partial updates": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_deleted_document_count: 43 + + - match: { result: updated } + + - do: + connector.last_sync: + connector_id: test-connector + body: + last_indexed_document_count: 42 + + - match: { result: updated } + + + - do: + connector.get: + connector_id: test-connector + + - match: { last_deleted_document_count: 43 } + - match: { last_indexed_document_count: 42 } + + --- "Update Connector Last Sync Stats - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 5bae203175d36..e9361d78ad707 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -312,14 +312,14 @@ public Connector(StreamInput in) throws IOException { ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, ObjectParser.ValueType.STRING_OR_NULL ); - PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareLongOrNull(optionalConstructorArg(), 0L, ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), (p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName()), ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, 
ObjectParser.ValueType.STRING_OR_NULL ); - PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareLongOrNull(optionalConstructorArg(), 0L, ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 7daae030155b7..62d07587701e4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -101,22 +101,78 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { public static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); public static final ParseField LAST_SYNCED_FIELD = new ParseField("last_synced"); + public String getLastAccessControlSyncError() { + return lastAccessControlSyncError; + } + + public Instant getLastAccessControlSyncScheduledAt() { + return lastAccessControlSyncScheduledAt; + } + + public ConnectorSyncStatus getLastAccessControlSyncStatus() { + return lastAccessControlSyncStatus; + } + + public Long getLastDeletedDocumentCount() { + return lastDeletedDocumentCount; + } + + public Instant getLastIncrementalSyncScheduledAt() { + return lastIncrementalSyncScheduledAt; + } + + public Long getLastIndexedDocumentCount() { + return lastIndexedDocumentCount; + } + + public String getLastSyncError() { + return lastSyncError; + } + + public Instant getLastSyncScheduledAt() { + return lastSyncScheduledAt; + } + + public ConnectorSyncStatus getLastSyncStatus() { + return lastSyncStatus; + } + + public Instant getLastSynced() 
{ + return lastSynced; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); - builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); - builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); + if (lastAccessControlSyncError != null) { + builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); + } + if (lastAccessControlSyncStatus != null) { + builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); + } + if (lastAccessControlSyncScheduledAt != null) { + builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); + } if (lastDeletedDocumentCount != null) { builder.field(LAST_DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastDeletedDocumentCount); } - builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); + if (lastIncrementalSyncScheduledAt != null) { + builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); + } if (lastIndexedDocumentCount != null) { builder.field(LAST_INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastIndexedDocumentCount); } - builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); - builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); - builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); - builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); + if (lastSyncError != null) { + builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); + } + if (lastSyncScheduledAt != null) { + builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); + 
} + if (lastSyncStatus != null) { + builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); + } + if (lastSynced != null) { + builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); + } return builder; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 522ae4b5dffc4..045cb725e477c 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -474,6 +474,48 @@ public void testUpdateConnectorLastSyncStats() throws Exception { assertThat(syncStats, equalTo(indexedConnector.getSyncInfo())); } + public void testUpdateConnectorLastSyncStats_withPartialUpdate() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); + + ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorSyncInfo syncStats = new ConnectorSyncInfo.Builder().setLastSyncError(randomAlphaOfLengthBetween(5, 10)) + .setLastIndexedDocumentCount(randomLong()) + .setLastDeletedDocumentCount(randomLong()) + .build(); + + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); + + DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connectorId); + + // Check fields from the partial update of last sync stats + assertThat(syncStats.getLastSyncError(), 
equalTo(indexedConnector.getSyncInfo().getLastSyncError())); + assertThat(syncStats.getLastDeletedDocumentCount(), equalTo(indexedConnector.getSyncInfo().getLastDeletedDocumentCount())); + assertThat(syncStats.getLastIndexedDocumentCount(), equalTo(indexedConnector.getSyncInfo().getLastIndexedDocumentCount())); + + ConnectorSyncInfo nextSyncStats = new ConnectorSyncInfo.Builder().setLastIndexedDocumentCount(randomLong()).build(); + + lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, nextSyncStats); + + updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + indexedConnector = awaitGetConnector(connectorId); + + // Check fields from the partial update of last sync stats + assertThat(nextSyncStats.getLastIndexedDocumentCount(), equalTo(indexedConnector.getSyncInfo().getLastIndexedDocumentCount())); + + // Check that other fields remained unchanged + assertThat(syncStats.getLastSyncError(), equalTo(indexedConnector.getSyncInfo().getLastSyncError())); + assertThat(syncStats.getLastDeletedDocumentCount(), equalTo(indexedConnector.getSyncInfo().getLastDeletedDocumentCount())); + + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); String connectorId = randomUUID(); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 8ed18fc303498..53efc2d0363c6 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -276,7 +276,9 @@ public void testToContent_WithNullValues() throws IOException { "last_access_control_sync_error": null, 
"last_access_control_sync_scheduled_at": null, "last_access_control_sync_status": null, + "last_deleted_document_count":null, "last_incremental_sync_scheduled_at": null, + "last_indexed_document_count":null, "last_seen": null, "last_sync_error": null, "last_sync_scheduled_at": null, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java index 7c14c425e5e13..980a0312c29f0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expressions.java @@ -7,18 +7,11 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.core.Tuple; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.AttributeInput; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.ConstantInput; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.type.DataTypes; import java.util.ArrayList; import java.util.Collection; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Set; import java.util.function.Predicate; import static java.util.Collections.emptyList; @@ -182,59 +175,6 @@ public static List> aliases(List output) { - for (Attribute attribute : output) { - if (attribute instanceof ReferenceAttribute) { - return true; - } - } - return false; - } - - public static List onlyPrimitiveFieldAttributes(Collection attributes) { - List filtered = new ArrayList<>(); - // add only primitives - // but filter out multi fields (allow only the top-level value) - Set seenMultiFields = new LinkedHashSet<>(); - - for (Attribute a : attributes) { 
- if (DataTypes.isUnsupported(a.dataType()) == false && DataTypes.isPrimitive(a.dataType())) { - if (a instanceof FieldAttribute fa) { - // skip nested fields and seen multi-fields - if (fa.isNested() == false && seenMultiFields.contains(fa.parent()) == false) { - filtered.add(a); - seenMultiFields.add(a); - } - } else { - filtered.add(a); - } - } - } - - return filtered; - } - - public static Pipe pipe(Expression e) { - if (e.foldable()) { - return new ConstantInput(e.source(), e, e.fold()); - } - if (e instanceof NamedExpression ne) { - return new AttributeInput(e.source(), e, ne.toAttribute()); - } - if (e instanceof Function f) { - return f.asPipe(); - } - throw new QlIllegalArgumentException("Cannot create pipe for {}", e); - } - - public static List pipe(List expressions) { - List pipes = new ArrayList<>(expressions.size()); - for (Expression e : expressions) { - pipes.add(pipe(e)); - } - return pipes; - } - public static String id(Expression e) { return Integer.toHexString(e.hashCode()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java index c0d493fcc8a82..cad5c631088f2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.ConstantInput; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.List; @@ -26,8 +24,6 @@ public abstract class Function 
extends Expression { private final String functionName = getClass().getSimpleName().toUpperCase(Locale.ROOT); - private Pipe lazyPipe = null; - // TODO: Functions supporting distinct should add a dedicated constructor Location, List, boolean protected Function(Source source, List children) { super(source, children); @@ -65,17 +61,6 @@ public boolean equals(Object obj) { return Objects.equals(children(), other.children()); } - public Pipe asPipe() { - if (lazyPipe == null) { - lazyPipe = foldable() ? new ConstantInput(source(), this, fold()) : makePipe(); - } - return lazyPipe; - } - - protected Pipe makePipe() { - throw new UnsupportedOperationException(); - } - @Override public String nodeString() { StringJoiner sj = new StringJoiner(",", functionName() + "(", ")"); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BaseSurrogateFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BaseSurrogateFunction.java index 8343f3aeff06e..efbcc4f869620 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BaseSurrogateFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BaseSurrogateFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.core.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.List; @@ -45,8 +44,4 @@ public Object fold() { return substitute().fold(); } - @Override - protected Pipe makePipe() { - return substitute().asPipe(); - } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java index 9cac6ad69a40b..2ef0b892138de 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.esql.core.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -42,11 +39,6 @@ public Expression field() { return field; } - @Override - public final Pipe makePipe() { - return new UnaryPipe(source(), this, Expressions.pipe(field()), makeProcessor()); - } - protected abstract Processor makeProcessor(); @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWith.java deleted file mode 100644 index 3e4852ad29d70..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWith.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.expression.function.scalar.string; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypes; - -import java.util.Arrays; - -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStringAndExact; -import static org.elasticsearch.xpack.esql.core.expression.function.scalar.string.StartsWithFunctionProcessor.doProcess; - -/** - * Function that checks if first parameter starts with the second parameter. Both parameters should be strings - * and the function returns a boolean value. 
- */ -public abstract class StartsWith extends CaseInsensitiveScalarFunction { - - private final Expression input; - private final Expression pattern; - - public StartsWith(Source source, Expression input, Expression pattern, boolean caseInsensitive) { - super(source, Arrays.asList(input, pattern), caseInsensitive); - this.input = input; - this.pattern = pattern; - } - - @Override - protected TypeResolution resolveType() { - if (childrenResolved() == false) { - return new TypeResolution("Unresolved children"); - } - - TypeResolution fieldResolution = isStringAndExact(input, sourceText(), FIRST); - if (fieldResolution.unresolved()) { - return fieldResolution; - } - - return isStringAndExact(pattern, sourceText(), SECOND); - } - - public Expression input() { - return input; - } - - public Expression pattern() { - return pattern; - } - - @Override - public Pipe makePipe() { - return new StartsWithFunctionPipe(source(), this, Expressions.pipe(input), Expressions.pipe(pattern), isCaseInsensitive()); - } - - @Override - public boolean foldable() { - return input.foldable() && pattern.foldable(); - } - - @Override - public Object fold() { - return doProcess(input.fold(), pattern.fold(), isCaseInsensitive()); - } - - @Override - public DataType dataType() { - return DataTypes.BOOLEAN; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipe.java deleted file mode 100644 index 849c7d987200a..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipe.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.function.scalar.string; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class StartsWithFunctionPipe extends Pipe { - - private final Pipe input; - private final Pipe pattern; - private final boolean isCaseSensitive; - - public StartsWithFunctionPipe(Source source, Expression expression, Pipe input, Pipe pattern, boolean isCaseSensitive) { - super(source, expression, Arrays.asList(input, pattern)); - this.input = input; - this.pattern = pattern; - this.isCaseSensitive = isCaseSensitive; - } - - @Override - public final Pipe replaceChildren(List newChildren) { - return replaceChildren(newChildren.get(0), newChildren.get(1)); - } - - @Override - public final Pipe resolveAttributes(AttributeResolver resolver) { - Pipe newField = input.resolveAttributes(resolver); - Pipe newPattern = pattern.resolveAttributes(resolver); - if (newField == input && newPattern == pattern) { - return this; - } - return replaceChildren(newField, newPattern); - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return input.supportedByAggsOnlyQuery() && pattern.supportedByAggsOnlyQuery(); - } - - @Override - public boolean resolved() { - return input.resolved() && pattern.resolved(); - } - - protected Pipe replaceChildren(Pipe newField, Pipe newPattern) { - return new StartsWithFunctionPipe(source(), expression(), newField, newPattern, isCaseSensitive); - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { 
- input.collectFields(sourceBuilder); - pattern.collectFields(sourceBuilder); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, StartsWithFunctionPipe::new, expression(), input, pattern, isCaseSensitive); - } - - @Override - public StartsWithFunctionProcessor asProcessor() { - return new StartsWithFunctionProcessor(input.asProcessor(), pattern.asProcessor(), isCaseSensitive); - } - - public Pipe input() { - return input; - } - - public Pipe pattern() { - return pattern; - } - - public boolean isCaseSensitive() { - return isCaseSensitive; - } - - @Override - public int hashCode() { - return Objects.hash(input, pattern, isCaseSensitive); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - StartsWithFunctionPipe other = (StartsWithFunctionPipe) obj; - return Objects.equals(input, other.input) - && Objects.equals(pattern, other.pattern) - && Objects.equals(isCaseSensitive, other.isCaseSensitive); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggExtractorInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggExtractorInput.java deleted file mode 100644 index 851bb0cd2053e..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggExtractorInput.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.execution.search.extractor.BucketExtractor; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.BucketExtractorProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ChainingProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public class AggExtractorInput extends LeafInput { - - private final Processor chained; - - public AggExtractorInput(Source source, Expression expression, Processor processor, BucketExtractor context) { - super(source, expression, context); - this.chained = processor; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, AggExtractorInput::new, expression(), chained, context()); - } - - @Override - public Processor asProcessor() { - Processor proc = new BucketExtractorProcessor(context()); - return chained != null ? 
new ChainingProcessor(proc, chained) : proc; - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return true; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - // Nothing to collect - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggNameInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggNameInput.java deleted file mode 100644 index ee8cc02ea2d07..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggNameInput.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public class AggNameInput extends CommonNonExecutableInput { - public AggNameInput(Source source, Expression expression, String context) { - super(source, expression, context); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, AggNameInput::new, expression(), context()); - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return true; - } - - @Override - public final boolean resolved() { - return false; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggPathInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggPathInput.java deleted file mode 100644 index 226bc818a7919..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AggPathInput.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.AggRef; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -public class AggPathInput extends CommonNonExecutableInput { - - // used in case the agg itself is not returned in a suitable format (like date aggs) - private final Processor action; - - public AggPathInput(Expression expression, AggRef context) { - this(Source.EMPTY, expression, context, null); - } - - /** - * - * Constructs a new AggPathInput instance. - * The action is used for handling corner-case results such as date histogram which returns - * a full date object for year which requires additional extraction. - */ - public AggPathInput(Source source, Expression expression, AggRef context, Processor action) { - super(source, expression, context); - this.action = action; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, AggPathInput::new, expression(), context(), action); - } - - public Processor action() { - return action; - } - - @Override - public boolean resolved() { - return true; - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return true; - } - - @Override - public int hashCode() { - return Objects.hash(context(), action); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - AggPathInput other = (AggPathInput) obj; - return Objects.equals(context(), other.context()) && Objects.equals(action, other.action); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInput.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInput.java deleted file mode 100644 index 5f25dfaea9277..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInput.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * An input that must first be rewritten against the rest of the query - * before it can be further processed. 
- */ -public class AttributeInput extends NonExecutableInput { - public AttributeInput(Source source, Expression expression, Attribute context) { - super(source, expression, context); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, AttributeInput::new, expression(), context()); - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return true; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return new ReferenceInput(source(), expression(), resolver.resolve(context())); - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - // Nothing to extract - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipe.java deleted file mode 100644 index a1f8a2b8e6448..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipe.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public abstract class BinaryPipe extends Pipe { - - private final Pipe left, right; - - public BinaryPipe(Source source, Expression expression, Pipe left, Pipe right) { - super(source, expression, Arrays.asList(left, right)); - this.left = left; - this.right = right; - } - - @Override - public final Pipe replaceChildren(List newChildren) { - return replaceChildren(newChildren.get(0), newChildren.get(1)); - } - - public Pipe left() { - return left; - } - - public Pipe right() { - return right; - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return left.supportedByAggsOnlyQuery() || right.supportedByAggsOnlyQuery(); - } - - @Override - public final Pipe resolveAttributes(AttributeResolver resolver) { - Pipe newLeft = left.resolveAttributes(resolver); - Pipe newRight = right.resolveAttributes(resolver); - if (newLeft == left && newRight == right) { - return this; - } - return replaceChildren(newLeft, newRight); - } - - /** - * Build a copy of this object with new left and right children. Used by - * {@link #resolveAttributes(AttributeResolver)}. 
- */ - protected abstract BinaryPipe replaceChildren(Pipe left, Pipe right); - - @Override - public boolean resolved() { - return left().resolved() && right().resolved(); - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - left.collectFields(sourceBuilder); - right.collectFields(sourceBuilder); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - BinaryPipe other = (BinaryPipe) obj; - return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/CommonNonExecutableInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/CommonNonExecutableInput.java deleted file mode 100644 index c6f96f7062db5..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/CommonNonExecutableInput.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.Source; - -/** - * Implementation common to most subclasses of - * {@link NonExecutableInput} but not shared by all. 
- */ -abstract class CommonNonExecutableInput extends NonExecutableInput { - CommonNonExecutableInput(Source source, Expression expression, T context) { - super(source, expression, context); - } - - @Override - public final Processor asProcessor() { - throw new QlIllegalArgumentException("Unresolved input - needs resolving first"); - } - - @Override - public final Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - // Nothing to extract - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ConstantInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ConstantInput.java deleted file mode 100644 index 560db5d43cad2..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ConstantInput.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public class ConstantInput extends LeafInput { - - public ConstantInput(Source source, Expression expression, Object context) { - super(source, expression, context); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, ConstantInput::new, expression(), context()); - } - - @Override - public Processor asProcessor() { - return new ConstantProcessor(context()); - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return false; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - // Nothing to collect - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/HitExtractorInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/HitExtractorInput.java deleted file mode 100644 index b51d164d5c5d4..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/HitExtractorInput.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.execution.search.extractor.HitExtractor; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.HitExtractorProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public class HitExtractorInput extends LeafInput { - - public HitExtractorInput(Source source, Expression expression, HitExtractor context) { - super(source, expression, context); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, HitExtractorInput::new, expression(), context()); - } - - @Override - public Processor asProcessor() { - return new HitExtractorProcessor(context()); - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return true; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - // No fields to collect - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/LeafInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/LeafInput.java deleted file mode 100644 index cf6c99a74c39f..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/LeafInput.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; -import java.util.Objects; - -import static java.util.Collections.emptyList; - -public abstract class LeafInput extends Pipe { - - private T context; - - public LeafInput(Source source, Expression expression, T context) { - super(source, expression, emptyList()); - this.context = context; - } - - @Override - public final Pipe replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - public T context() { - return context; - } - - @Override - public boolean resolved() { - return true; - } - - @Override - public int hashCode() { - return Objects.hash(expression(), context); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - LeafInput other = (LeafInput) obj; - return Objects.equals(context(), other.context()) && Objects.equals(expression(), other.expression()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/MultiPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/MultiPipe.java deleted file mode 100644 index c75437c6cbe81..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/MultiPipe.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.ArrayList; -import java.util.List; - -public abstract class MultiPipe extends Pipe { - - protected MultiPipe(Source source, Expression expression, List children) { - super(source, expression, children); - } - - @Override - public Processor asProcessor() { - List procs = new ArrayList<>(); - for (Pipe pipe : children()) { - procs.add(pipe.asProcessor()); - } - - return asProcessor(procs); - } - - public abstract Processor asProcessor(List procs); -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/NonExecutableInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/NonExecutableInput.java deleted file mode 100644 index 74b522c8a6178..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/NonExecutableInput.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public abstract class NonExecutableInput extends LeafInput { - NonExecutableInput(Source source, Expression expression, T context) { - super(source, expression, context); - } - - @Override - public boolean resolved() { - return false; - } - - @Override - public Processor asProcessor() { - throw new QlIllegalArgumentException("Unresolved input - needs resolving first"); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/Pipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/Pipe.java deleted file mode 100644 index 4883d0abd028e..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/Pipe.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; -import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; -import org.elasticsearch.xpack.esql.core.execution.search.FieldExtraction; -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.ArrayList; -import java.util.List; - -/** - * Processing pipe for an expression (tree). Used for local execution of expressions - * on the invoking node. - * For example, the {@code Pipe} of: - * - * ABS(MAX(foo)) + CAST(bar) - * - * Is an {@code Add} operator with left {@code ABS} over an aggregate (MAX), and - * right being a {@code CAST} function. - */ -public abstract class Pipe extends Node implements FieldExtraction, Resolvable { - - private final Expression expression; - - public Pipe(Source source, Expression expression, List children) { - super(source, children); - this.expression = expression; - } - - public Expression expression() { - return expression; - } - - @Override - public boolean resolved() { - return Resolvables.resolved(children()); - } - - @Override - public void collectFields(QlSourceBuilder sourceBuilder) { - children().forEach(c -> c.collectFields(sourceBuilder)); - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return children().stream().anyMatch(Pipe::supportedByAggsOnlyQuery); - } - - public abstract Processor asProcessor(); - - /** - * Resolve {@link Attribute}s which are unprocessable into - * {@link Pipe}s that are. 
- * - * @return {@code this} if the resolution doesn't change the - * definition, a new {@link Pipe} otherwise - */ - public Pipe resolveAttributes(AttributeResolver resolver) { - List newPipes = new ArrayList<>(children().size()); - for (Pipe p : children()) { - newPipes.add(p.resolveAttributes(resolver)); - } - - return children().equals(newPipes) ? this : replaceChildrenSameSize(newPipes); - } - - public interface AttributeResolver { - FieldExtraction resolve(Attribute attribute); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ReferenceInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ReferenceInput.java deleted file mode 100644 index 09db67c9bceb3..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/ReferenceInput.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.FieldExtraction; -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -public class ReferenceInput extends NonExecutableInput { - public ReferenceInput(Source source, Expression expression, FieldExtraction context) { - super(source, expression, context); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, ReferenceInput::new, expression(), context()); - } - - @Override - public final boolean supportedByAggsOnlyQuery() { - return false; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public final void collectFields(QlSourceBuilder sourceBuilder) { - context().collectFields(sourceBuilder); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipe.java deleted file mode 100644 index 75257e7b3a4c7..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipe.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ChainingProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; -import java.util.Objects; - -import static java.util.Collections.singletonList; - -public final class UnaryPipe extends Pipe { - - private final Pipe child; - private final Processor action; - - public UnaryPipe(Source source, Expression expression, Pipe child, Processor action) { - super(source, expression, singletonList(child)); - this.child = child; - this.action = action; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, UnaryPipe::new, expression(), child, action); - } - - @Override - public Pipe replaceChildren(List newChildren) { - return new UnaryPipe(source(), expression(), newChildren.get(0), action); - } - - public Pipe child() { - return child; - } - - public Processor action() { - return action; - } - - @Override - public boolean resolved() { - return child.resolved(); - } - - @Override - public Processor asProcessor() { - return new ChainingProcessor(child.asProcessor(), action); - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return child.supportedByAggsOnlyQuery(); - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - Pipe newChild = child.resolveAttributes(resolver); - if (newChild == child) { - return this; - } - return new UnaryPipe(source(), expression(), newChild, action); - } - - @Override - public void collectFields(QlSourceBuilder sourceBuilder) { - child.collectFields(sourceBuilder); - } - - @Override - public int hashCode() { - return 
Objects.hash(expression(), child, action); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UnaryPipe other = (UnaryPipe) obj; - return Objects.equals(action, other.action) - && Objects.equals(child, other.child) - && Objects.equals(expression(), other.expression()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java index 4187ceba49089..9d0dd9b161b4d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java @@ -7,14 +7,8 @@ package org.elasticsearch.xpack.esql.core.expression.predicate; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicPipe; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonPipe; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -136,26 +130,6 @@ public DataType 
dataType() { return DataTypes.BOOLEAN; } - @Override - protected Pipe makePipe() { - BinaryComparisonPipe lowerPipe = new BinaryComparisonPipe( - source(), - this, - Expressions.pipe(value()), - Expressions.pipe(lower()), - includeLower() ? BinaryComparisonOperation.GTE : BinaryComparisonOperation.GT - ); - BinaryComparisonPipe upperPipe = new BinaryComparisonPipe( - source(), - this, - Expressions.pipe(value()), - Expressions.pipe(upper()), - includeUpper() ? BinaryComparisonOperation.LTE : BinaryComparisonOperation.LT - ); - BinaryLogicPipe and = new BinaryLogicPipe(source(), this, lowerPipe, upperPipe, BinaryLogicOperation.AND); - return and; - } - @Override public int hashCode() { return Objects.hash(includeLower, includeUpper, value, lower, upper, zoneId); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java index 960933fabaf7f..a7145a7acfe29 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.tree.Source; @@ 
-35,11 +33,6 @@ protected TypeResolution resolveInputType(Expression e, ParamOrdinal paramOrdina return isBoolean(e, sourceText(), paramOrdinal); } - @Override - protected Pipe makePipe() { - return new BinaryLogicPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); - } - @Override public Nullability nullable() { // Cannot fold null due to 3vl, constant folding will do any possible folding. diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicPipe.java deleted file mode 100644 index ec4a3a7684ca7..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicPipe.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -public class BinaryLogicPipe extends BinaryPipe { - - private final BinaryLogicOperation operation; - - public BinaryLogicPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryLogicOperation operation) { - super(source, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryLogicPipe::new, expression(), left(), right(), operation); - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryLogicPipe(source(), expression(), left, right, operation); - } - - @Override - public BinaryLogicProcessor asProcessor() { - return new BinaryLogicProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), operation); - } - - @Override - public boolean equals(Object obj) { - if (super.equals(obj)) { - BinaryLogicPipe other = (BinaryLogicPipe) obj; - return Objects.equals(operation, other.operation); - } - return false; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/ArithmeticOperation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/ArithmeticOperation.java index 5e2e256a410d5..8dc0f58083179 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/ArithmeticOperation.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/ArithmeticOperation.java @@ -7,9 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,9 +40,4 @@ public DataType dataType() { } return dataType; } - - @Override - protected Pipe makePipe() { - return new BinaryArithmeticPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); - } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java deleted file mode 100644 index f6b75798ce90c..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -public class BinaryArithmeticPipe extends BinaryPipe { - - private final BinaryArithmeticOperation operation; - - public BinaryArithmeticPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryArithmeticOperation operation) { - super(source, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryArithmeticPipe::new, expression(), left(), right(), operation); - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryArithmeticPipe(source(), expression(), left, right, operation); - } - - @Override - public Processor asProcessor() { - return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), operation); - } - - @Override - public boolean equals(Object obj) { - if (super.equals(obj)) { - BinaryArithmeticPipe other = (BinaryArithmeticPipe) obj; - return Objects.equals(operation, other.operation); - } - return false; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java index f6e7ae4cb0fa7..a9c4fd439a1ff 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -43,11 +41,6 @@ public DataType dataType() { return DataTypes.BOOLEAN; } - @Override - protected Pipe makePipe() { - return new BinaryComparisonPipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), function()); - } - public static Integer compare(Object left, Object right) { return Comparisons.compare(left, right); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonPipe.java deleted file mode 100644 index 0cf0fc5ce5443..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonPipe.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Objects; - -public class BinaryComparisonPipe extends BinaryPipe { - - private final BinaryComparisonOperation operation; - - public BinaryComparisonPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryComparisonOperation operation) { - super(source, expression, left, right); - this.operation = operation; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, BinaryComparisonPipe::new, expression(), left(), right(), operation); - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new BinaryComparisonPipe(source(), expression(), left, right, operation); - } - - @Override - public BinaryComparisonProcessor asProcessor() { - return new BinaryComparisonProcessor(left().asProcessor(), right().asProcessor(), operation); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), operation); - } - - @Override - public boolean equals(Object obj) { - if (super.equals(obj)) { - BinaryComparisonPipe other = (BinaryComparisonPipe) obj; - return Objects.equals(operation, other.operation); - } - return false; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java index e03e1ddb1e865..ff188cb30d7d6 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -26,7 +25,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; @@ -115,11 +113,6 @@ protected boolean areCompatible(DataType left, DataType right) { return DataTypes.areCompatible(left, right); } - @Override - protected Pipe makePipe() { - return new InPipe(source(), this, children().stream().map(Expressions::pipe).collect(Collectors.toList())); - } - @Override protected TypeResolution resolveType() { TypeResolution resolution = TypeResolutions.isExact(value, functionName(), DEFAULT); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InPipe.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InPipe.java deleted file mode 100644 index e667a04f83ca7..0000000000000 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InPipe.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.MultiPipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; -import java.util.Objects; - -public class InPipe extends MultiPipe { - - public InPipe(Source source, Expression expression, List pipes) { - super(source, expression, pipes); - } - - @Override - public final Pipe replaceChildren(List newChildren) { - return new InPipe(source(), expression(), newChildren); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, InPipe::new, expression(), children()); - } - - @Override - public int hashCode() { - return Objects.hash(children()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - InPipe other = (InPipe) obj; - return Objects.equals(children(), other.children()); - } - - @Override - public InProcessor asProcessor(List processors) { - return new InProcessor(processors); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index 568baad7e5bfa..39674a8a5a8b0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -16,8 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; @@ -45,7 +43,6 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.PrefixQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.RangeQuery; @@ -80,37 +77,6 @@ public final class ExpressionTranslators { public static final String DATE_FORMAT = "strict_date_optional_time_nanos"; public static final String TIME_FORMAT = "strict_hour_minute_second_fraction"; - public static final List> QUERY_TRANSLATORS = List.of( - new BinaryComparisons(), - new Ranges(), - new BinaryLogic(), - new IsNulls(), - new IsNotNulls(), - new Nots(), - new Likes(), - new InComparisons(), - new StringQueries(), - new Matches(), 
- new MultiMatches(), - new Scalars() - ); - - public static Query toQuery(Expression e) { - return toQuery(e, new QlTranslatorHandler()); - } - - public static Query toQuery(Expression e, TranslatorHandler handler) { - Query translation = null; - for (ExpressionTranslator translator : QUERY_TRANSLATORS) { - translation = translator.translate(e, handler); - if (translation != null) { - return translation; - } - } - - throw new QlIllegalArgumentException("Don't know how to translate {} {}", e.nodeName(), e); - } - public static Object valueOf(Expression e) { if (e.foldable()) { return e.fold(); @@ -449,39 +415,11 @@ private static Query translate(In in, TranslatorHandler handler) { } } - public static class Scalars extends ExpressionTranslator { - - @Override - protected Query asQuery(ScalarFunction f, TranslatorHandler handler) { - return doTranslate(f, handler); - } - - public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { - Query q = doKnownTranslate(f, handler); - if (q != null) { - return q; - } - throw new QlIllegalArgumentException("Cannot translate expression:[" + f.sourceText() + "]"); - } - - public static Query doKnownTranslate(ScalarFunction f, TranslatorHandler handler) { - if (f instanceof StartsWith sw) { - if (sw.input() instanceof FieldAttribute && sw.pattern().foldable()) { - String targetFieldName = handler.nameOf(((FieldAttribute) sw.input()).exactAttribute()); - String pattern = (String) sw.pattern().fold(); - - return new PrefixQuery(f.source(), targetFieldName, pattern, sw.isCaseInsensitive()); - } - } - return null; - } - } - - public static Query or(Source source, Query left, Query right) { + private static Query or(Source source, Query left, Query right) { return boolQuery(source, left, right, false); } - public static Query and(Source source, Query left, Query right) { + private static Query and(Source source, Query left, Query right) { return boolQuery(source, left, right, true); } diff --git 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/QlTranslatorHandler.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/QlTranslatorHandler.java deleted file mode 100644 index e754681892b70..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/QlTranslatorHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.planner; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; - -public class QlTranslatorHandler implements TranslatorHandler { - - @Override - public Query asQuery(Expression e) { - return ExpressionTranslators.toQuery(e, this); - } - - @Override - public String nameOf(Expression e) { - return Expressions.name(e); - } - - @Override - public Object convert(Object value, DataType dataType) { - return DataTypeConverter.convert(value, dataType); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java index f014d2d3e3fd0..c7ab9731cb8dc 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java @@ -12,7 
+12,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ConfigurationFunction; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -202,11 +201,6 @@ public Expression replaceChildren(List newChildren) { public DataType dataType() { return null; } - - @Override - protected Pipe makePipe() { - return null; - } } public static class DummyFunction2 extends DummyFunction { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipeTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipeTests.java deleted file mode 100644 index 8ddf3a84fc1fe..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithFunctionPipeTests.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.expression.function.scalar.string; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.Combinations; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.tree.SourceTests; - -import java.util.ArrayList; -import java.util.BitSet; -import java.util.List; -import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xpack.esql.core.expression.Expressions.pipe; -import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.randomStringLiteral; -import static org.elasticsearch.xpack.esql.core.tree.SourceTests.randomSource; - -public class StartsWithFunctionPipeTests extends AbstractNodeTestCase { - - public static class StartsWithTest extends StartsWith { - public StartsWithTest(Source source, Expression input, Expression pattern, boolean caseInsensitive) { - super(source, input, pattern, caseInsensitive); - } - - @Override - public Expression replaceChildren(List newChildren) { - return new StartsWithTest(source(), newChildren.get(0), newChildren.get(1), isCaseInsensitive()); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, StartsWithTest::new, input(), pattern(), isCaseInsensitive()); - } - } - - @Override - protected StartsWithFunctionPipe randomInstance() { - return randomStartsWithFunctionPipe(); - } - - private Expression randomStartsWithFunctionExpression() { - return randomStartsWithFunctionPipe().expression(); - } - - public static StartsWithFunctionPipe randomStartsWithFunctionPipe() { - return (StartsWithFunctionPipe) new 
StartsWithTest(randomSource(), randomStringLiteral(), randomStringLiteral(), randomBoolean()) - .makePipe(); - } - - @Override - public void testTransform() { - // test transforming only the properties (source, expression), - // skipping the children (the two parameters of the binary function) which are tested separately - StartsWithFunctionPipe b1 = randomInstance(); - Expression newExpression = randomValueOtherThan(b1.expression(), this::randomStartsWithFunctionExpression); - StartsWithFunctionPipe newB = new StartsWithFunctionPipe( - b1.source(), - newExpression, - b1.input(), - b1.pattern(), - b1.isCaseSensitive() - ); - - assertEquals(newB, b1.transformPropertiesOnly(Expression.class, v -> Objects.equals(v, b1.expression()) ? newExpression : v)); - - StartsWithFunctionPipe b2 = randomInstance(); - Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource); - newB = new StartsWithFunctionPipe(newLoc, b2.expression(), b2.input(), b2.pattern(), b2.isCaseSensitive()); - - assertEquals(newB, b2.transformPropertiesOnly(Source.class, v -> Objects.equals(v, b2.source()) ? 
newLoc : v)); - } - - @Override - public void testReplaceChildren() { - StartsWithFunctionPipe b = randomInstance(); - Pipe newInput = randomValueOtherThan(b.input(), () -> pipe(randomStringLiteral())); - Pipe newPattern = randomValueOtherThan(b.pattern(), () -> pipe(randomStringLiteral())); - - StartsWithFunctionPipe newB = new StartsWithFunctionPipe(b.source(), b.expression(), b.input(), b.pattern(), b.isCaseSensitive()); - StartsWithFunctionPipe transformed = (StartsWithFunctionPipe) newB.replaceChildren(newInput, b.pattern()); - assertEquals(transformed.input(), newInput); - assertEquals(transformed.source(), b.source()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.pattern(), b.pattern()); - - transformed = (StartsWithFunctionPipe) newB.replaceChildren(b.input(), newPattern); - assertEquals(transformed.input(), b.input()); - assertEquals(transformed.source(), b.source()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.pattern(), newPattern); - - transformed = (StartsWithFunctionPipe) newB.replaceChildren(newInput, newPattern); - assertEquals(transformed.input(), newInput); - assertEquals(transformed.source(), b.source()); - assertEquals(transformed.expression(), b.expression()); - assertEquals(transformed.pattern(), newPattern); - } - - @Override - protected StartsWithFunctionPipe mutate(StartsWithFunctionPipe instance) { - List> randoms = new ArrayList<>(); - for (int i = 1; i < 4; i++) { - for (BitSet comb : new Combinations(3, i)) { - randoms.add( - f -> new StartsWithFunctionPipe( - f.source(), - f.expression(), - comb.get(0) ? randomValueOtherThan(f.input(), () -> pipe(randomStringLiteral())) : f.input(), - comb.get(1) ? randomValueOtherThan(f.pattern(), () -> pipe(randomStringLiteral())) : f.pattern(), - comb.get(2) ? 
randomValueOtherThan(f.isCaseSensitive(), ESTestCase::randomBoolean) : f.isCaseSensitive() - ) - ); - } - } - - return randomFrom(randoms).apply(instance); - } - - @Override - protected StartsWithFunctionPipe copy(StartsWithFunctionPipe instance) { - return new StartsWithFunctionPipe( - instance.source(), - instance.expression(), - instance.input(), - instance.pattern(), - instance.isCaseSensitive() - ); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithProcessorTests.java deleted file mode 100644 index 23a52082b9b16..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/string/StartsWithProcessorTests.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.expression.function.scalar.string; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.LiteralTests; -import org.hamcrest.Matchers; - -import static org.elasticsearch.xpack.esql.core.expression.function.scalar.FunctionTestUtils.l; -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; - -public class StartsWithProcessorTests extends ESTestCase { - - public void testSensitiveStartsWithFunctionWithValidInput() { - assertTrue(sensitiveStartsWith("foobarbar", "f")); - assertFalse(sensitiveStartsWith("foobar", "bar")); - assertFalse(sensitiveStartsWith("foo", "foobar")); - assertTrue(sensitiveStartsWith("foobar", "")); - assertTrue(sensitiveStartsWith("foo", "foo")); - assertTrue(sensitiveStartsWith("FOoBar", "FOo")); - assertFalse(sensitiveStartsWith("", "bar")); - assertNull(sensitiveStartsWith(null, "bar")); - assertNull(sensitiveStartsWith("foo", null)); - assertNull(sensitiveStartsWith(null, null)); - - assertFalse(sensitiveStartsWith("foo", "FO")); - assertFalse(sensitiveStartsWith("foo", "FOo")); - } - - private Boolean sensitiveStartsWith(String left, String right) { - return startsWith(false, left, right); - } - - public void testInsensitiveStartsWithFunctionWithValidInput() { - assertTrue(insensitiveStartsWith("foobarbar", "f")); - assertFalse(insensitiveStartsWith("foobar", "bar")); - assertFalse(insensitiveStartsWith("foo", "foobar")); - assertTrue(insensitiveStartsWith("foobar", "")); - assertTrue(insensitiveStartsWith("foo", "foo")); - assertTrue(insensitiveStartsWith("FOoBar", "FOo")); - assertFalse(insensitiveStartsWith("", "bar")); - assertNull(insensitiveStartsWith(null, "bar")); - assertNull(insensitiveStartsWith("foo", null)); - 
assertNull(insensitiveStartsWith(null, null)); - - assertTrue(insensitiveStartsWith("foo", "FO")); - assertTrue(insensitiveStartsWith("foo", "FOo")); - } - - private Boolean insensitiveStartsWith(String left, String right) { - return startsWith(true, left, right); - } - - private Boolean startsWith(boolean caseInsensitive, String left, String right) { - return (Boolean) new StartsWithFunctionPipeTests.StartsWithTest(EMPTY, l(left), l(right), caseInsensitive).makePipe() - .asProcessor() - .process(null); - } - - private Boolean untypedStartsWith(Object left, Object right) { - return (Boolean) new StartsWithFunctionPipeTests.StartsWithTest(EMPTY, l(left), l(right), randomBoolean()).makePipe() - .asProcessor() - .process(null); - } - - public void testStartsWithFunctionInputsValidation() { - QlIllegalArgumentException siae = expectThrows(QlIllegalArgumentException.class, () -> untypedStartsWith(5, "foo")); - assertEquals("A string/char is required; received [5]", siae.getMessage()); - siae = expectThrows(QlIllegalArgumentException.class, () -> untypedStartsWith("bar", false)); - assertEquals("A string/char is required; received [false]", siae.getMessage()); - } - - public void testStartsWithFunctionWithRandomInvalidDataType() { - Literal literal = randomValueOtherThanMany(v -> v.dataType() == KEYWORD, () -> LiteralTests.randomLiteral()); - QlIllegalArgumentException siae = expectThrows(QlIllegalArgumentException.class, () -> untypedStartsWith(literal, "foo")); - assertThat(siae.getMessage(), Matchers.startsWith("A string/char is required; received")); - siae = expectThrows(QlIllegalArgumentException.class, () -> untypedStartsWith("foo", literal)); - assertThat(siae.getMessage(), Matchers.startsWith("A string/char is required; received")); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInputTests.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInputTests.java deleted file mode 100644 index b101b26d71404..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/AttributeInputTests.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.execution.search.FieldExtraction; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; - -import static org.mockito.Mockito.mock; - -public class AttributeInputTests extends ESTestCase { - public void testResolveAttributes() { - FieldExtraction column = mock(FieldExtraction.class); - Expression expression = mock(Expression.class); - Attribute attribute = mock(Attribute.class); - - ReferenceInput expected = new ReferenceInput(expression.source(), expression, column); - - assertEquals(expected, new AttributeInput(expression.source(), expression, attribute).resolveAttributes(a -> { - assertSame(attribute, a); - return column; - })); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipesTests.java deleted file mode 100644 index a212245631d5b..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/BinaryPipesTests.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe.AttributeResolver; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; - -import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.esql.core.type.DataTypes.KEYWORD; - -public class BinaryPipesTests extends ESTestCase { - public void testSupportedByAggsOnlyQuery() { - Pipe supported = new DummyPipe(true); - Pipe unsupported = new DummyPipe(false); - - assertFalse(new DummyBinaryPipe(unsupported, unsupported).supportedByAggsOnlyQuery()); - assertTrue(new DummyBinaryPipe(unsupported, supported).supportedByAggsOnlyQuery()); - assertTrue(new DummyBinaryPipe(supported, unsupported).supportedByAggsOnlyQuery()); - assertTrue(new DummyBinaryPipe(supported, supported).supportedByAggsOnlyQuery()); - } - - public void testResolveAttributes() { - Pipe needsNothing = new DummyPipe(randomBoolean()); - Pipe resolvesTo = new DummyPipe(randomBoolean()); - Pipe needsResolution = new DummyPipe(randomBoolean()) { - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return resolvesTo; - } - }; - AttributeResolver resolver = a -> { - fail("not exepected"); - return null; - }; - - Pipe d = new DummyBinaryPipe(needsNothing, needsNothing); - assertSame(d, 
d.resolveAttributes(resolver)); - - d = new DummyBinaryPipe(needsNothing, needsResolution); - Pipe expected = new DummyBinaryPipe(needsNothing, resolvesTo); - assertEquals(expected, d.resolveAttributes(resolver)); - - d = new DummyBinaryPipe(needsResolution, needsNothing); - expected = new DummyBinaryPipe(resolvesTo, needsNothing); - assertEquals(expected, d.resolveAttributes(resolver)); - } - - public void testCollectFields() { - DummyPipe wantsScore = new DummyPipe(randomBoolean()) { - @Override - public void collectFields(QlSourceBuilder sourceBuilder) { - sourceBuilder.trackScores(); - } - }; - DummyPipe wantsNothing = new DummyPipe(randomBoolean()); - assertFalse(tracksScores(new DummyBinaryPipe(wantsNothing, wantsNothing))); - assertTrue(tracksScores(new DummyBinaryPipe(wantsScore, wantsNothing))); - assertTrue(tracksScores(new DummyBinaryPipe(wantsNothing, wantsScore))); - } - - /** - * Returns {@code true} if the processor defintion builds a query that - * tracks scores, {@code false} otherwise. Used for testing - * {@link Pipe#collectFields(QlSourceBuilder)}. 
- */ - static boolean tracksScores(Pipe d) { - QlSourceBuilder b = new QlSourceBuilder(); - d.collectFields(b); - SearchSourceBuilder source = new SearchSourceBuilder(); - b.build(source); - return source.trackScores(); - } - - public static BinaryPipe randomBinaryPipe() { - return new DummyBinaryPipe(randomUnaryPipe(), randomUnaryPipe()); - } - - public static Pipe randomUnaryPipe() { - return new ConstantInput(Source.EMPTY, new Literal(Source.EMPTY, randomAlphaOfLength(16), KEYWORD), randomAlphaOfLength(16)); - } - - public static final class DummyBinaryPipe extends BinaryPipe { - public DummyBinaryPipe(Pipe left, Pipe right) { - this(Source.EMPTY, left, right); - } - - public DummyBinaryPipe(Source source, Pipe left, Pipe right) { - super(source, null, left, right); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, DummyBinaryPipe::new, left(), right()); - } - - @Override - public Processor asProcessor() { - return null; - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new DummyBinaryPipe(source(), left, right); - } - } - - public static class DummyPipe extends Pipe { - private final boolean supportedByAggsOnlyQuery; - - public DummyPipe(boolean supportedByAggsOnlyQuery) { - this(Source.EMPTY, supportedByAggsOnlyQuery); - } - - public DummyPipe(Source source, boolean supportedByAggsOnlyQuery) { - super(source, null, emptyList()); - this.supportedByAggsOnlyQuery = supportedByAggsOnlyQuery; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, DummyPipe::new, supportedByAggsOnlyQuery); - } - - @Override - public Pipe replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - @Override - public boolean supportedByAggsOnlyQuery() { - return supportedByAggsOnlyQuery; - } - - @Override - public boolean resolved() { - return true; - } - - @Override - public Processor asProcessor() { - 
return null; - } - - @Override - public Pipe resolveAttributes(AttributeResolver resolver) { - return this; - } - - @Override - public void collectFields(QlSourceBuilder sourceBuilder) {} - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipeTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipeTests.java deleted file mode 100644 index 81b1b497abf4f..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/pipeline/UnaryPipeTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.pipeline; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.execution.search.QlSourceBuilder; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipesTests.DummyPipe; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe.AttributeResolver; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import static org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipesTests.tracksScores; - -public class UnaryPipeTests extends ESTestCase { - public void testSupportedByAggsOnlyQuery() { - Pipe supported = new DummyPipe(true); - Pipe unsupported = new DummyPipe(false); - - assertFalse(newUnaryProcessor(unsupported).supportedByAggsOnlyQuery()); - assertTrue(newUnaryProcessor(supported).supportedByAggsOnlyQuery()); - } - - public void testResolveAttributes() { - Pipe needsNothing = new DummyPipe(randomBoolean()); - Pipe resolvesTo = new DummyPipe(randomBoolean()); - Pipe needsResolution = new DummyPipe(randomBoolean()) { - @Override 
- public Pipe resolveAttributes(AttributeResolver resolver) { - return resolvesTo; - } - }; - AttributeResolver resolver = a -> { - fail("not exepected"); - return null; - }; - - Pipe d = newUnaryProcessor(needsNothing); - assertSame(d, d.resolveAttributes(resolver)); - - d = newUnaryProcessor(needsResolution); - Pipe expected = newUnaryProcessor(resolvesTo); - assertEquals(expected, d.resolveAttributes(resolver)); - } - - public void testCollectFields() { - DummyPipe wantsScore = new DummyPipe(randomBoolean()) { - @Override - public void collectFields(QlSourceBuilder sourceBuilder) { - sourceBuilder.trackScores(); - } - }; - DummyPipe wantsNothing = new DummyPipe(randomBoolean()); - assertFalse(tracksScores(newUnaryProcessor(wantsNothing))); - assertTrue(tracksScores(newUnaryProcessor(wantsScore))); - } - - private Pipe newUnaryProcessor(Pipe child) { - return new UnaryPipe(Source.EMPTY, null, child, null); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java index 50c5dcc228fdd..f7bbbd9f61189 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java @@ -6,18 +6,18 @@ */ package org.elasticsearch.xpack.esql.core.expression.gen.processor; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessorTests; -import 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryArithmeticProcessorTests; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessorTests; import org.elasticsearch.xpack.esql.core.expression.processor.Processors; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109012") public class ChainingProcessorTests extends AbstractWireSerializingTestCase { public static ChainingProcessor randomComposeProcessor() { return new ChainingProcessor(randomProcessor(), randomProcessor()); @@ -52,8 +52,6 @@ public static Processor randomProcessor() { List> options = new ArrayList<>(); options.add(ChainingProcessorTests::randomComposeProcessor); options.add(BinaryLogicProcessorTests::randomProcessor); - options.add(BinaryArithmeticProcessorTests::randomProcessor); - options.add(BinaryComparisonProcessorTests::randomProcessor); return randomFrom(options).get(); } } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java deleted file mode 100644 index 759acc29a5e5a..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessorTests.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.TestUtils; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; -import org.elasticsearch.xpack.esql.core.util.NumericUtils; - -import java.math.BigInteger; - -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; - -public class BinaryArithmeticProcessorTests extends AbstractWireSerializingTestCase { - public static BinaryArithmeticProcessor randomProcessor() { - return new BinaryArithmeticProcessor( - new ConstantProcessor(randomLong()), - new ConstantProcessor(randomLong()), - randomFrom(DefaultBinaryArithmeticOperation.values()) - ); - } - - @Override - protected BinaryArithmeticProcessor createTestInstance() { - return randomProcessor(); - } - - @Override - protected BinaryArithmeticProcessor mutateInstance(BinaryArithmeticProcessor instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Reader instanceReader() { - return BinaryArithmeticProcessor::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - public void testAdd() { - Processor ba = new Add(EMPTY, l(7), l(3)).makePipe().asProcessor(); - assertEquals(10, ba.process(null)); - } - - public void testAddUnsignedLong() { - Processor ba = new Add(EMPTY, l(BigInteger.valueOf(7)), 
l(3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(10), ba.process(null)); - - ba = new Add(EMPTY, l(BigInteger.ONE), l(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE))).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), ba.process(null)); - - ba = new Add(EMPTY, l(BigInteger.valueOf(7)), l((short) -3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(4), ba.process(null)); - - ba = new Add(EMPTY, l(BigInteger.valueOf(7)), l(-3f)).makePipe().asProcessor(); - assertEquals(4f, ba.process(null)); - - Processor pn = new Add(EMPTY, l(BigInteger.valueOf(7)), l(-8)).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> pn.process(null)); - - Processor pm = new Add(EMPTY, l(NumericUtils.UNSIGNED_LONG_MAX), l(1)).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> pm.process(null)); - } - - public void testSub() { - Processor ba = new Sub(EMPTY, l(7), l(3)).makePipe().asProcessor(); - assertEquals(4, ba.process(null)); - } - - public void testSubUnsignedLong() { - Processor bs = new Sub(EMPTY, l(BigInteger.valueOf(7)), l(3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(4), bs.process(null)); - - bs = new Sub(EMPTY, l(BigInteger.valueOf(7)), l((short) -3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(10), bs.process(null)); - - bs = new Sub(EMPTY, l(BigInteger.valueOf(7)), l(3f)).makePipe().asProcessor(); - assertEquals(4f, bs.process(null)); - - Processor proc = new Sub(EMPTY, l(BigInteger.valueOf(7)), l(8)).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> proc.process(null)); - } - - public void testMul() { - Processor ba = new Mul(EMPTY, l(7), l(3)).makePipe().asProcessor(); - assertEquals(21, ba.process(null)); - } - - public void testMulUnsignedLong() { - Processor bm = new Mul(EMPTY, l(BigInteger.valueOf(7)), l(3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(21), 
bm.process(null)); - - bm = new Mul(EMPTY, l(BigInteger.valueOf(7)), l(3f)).makePipe().asProcessor(); - assertEquals(21f, bm.process(null)); - - Processor proc = new Mul(EMPTY, l(BigInteger.valueOf(7)), l(-8)).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> proc.process(null)); - } - - public void testDiv() { - Processor ba = new Div(EMPTY, l(7), l(3)).makePipe().asProcessor(); - assertEquals(2, ((Number) ba.process(null)).longValue()); - ba = new Div(EMPTY, l((double) 7), l(3)).makePipe().asProcessor(); - assertEquals(2.33, ((Number) ba.process(null)).doubleValue(), 0.01d); - } - - public void testDivUnsignedLong() { - Processor bd = new Div(EMPTY, l(BigInteger.valueOf(7)), l(3)).makePipe().asProcessor(); - assertEquals(BigInteger.TWO, bd.process(null)); - - bd = new Div(EMPTY, l(7), l(BigInteger.valueOf(8))).makePipe().asProcessor(); - assertEquals(BigInteger.ZERO, bd.process(null)); - - bd = new Div(EMPTY, l(BigInteger.valueOf(7)), l(3f)).makePipe().asProcessor(); - assertEquals(7 / 3f, bd.process(null)); - - Processor proc = new Div(EMPTY, l(BigInteger.valueOf(7)), l(-2)).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> proc.process(null)); - } - - public void testMod() { - Processor ba = new Mod(EMPTY, l(7), l(3)).makePipe().asProcessor(); - assertEquals(1, ba.process(null)); - } - - public void testModUnsignedLong() { - Processor bm = new Mod(EMPTY, l(BigInteger.valueOf(7)), l(3)).makePipe().asProcessor(); - assertEquals(BigInteger.valueOf(1), bm.process(null)); - - Processor proc = new Mod(EMPTY, l(-7), l(BigInteger.valueOf(3))).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> proc.process(null)); - } - - public void testNegate() { - Processor ba = new Neg(EMPTY, l(7)).asPipe().asProcessor(); - assertEquals(-7, ba.process(null)); - } - - public void testNegateUnsignedLong() { - Processor nm = new Neg(EMPTY, l(BigInteger.valueOf(0))).makePipe().asProcessor(); - 
assertEquals(BigInteger.ZERO, nm.process(null)); - - Processor proc = new Neg(EMPTY, l(BigInteger.valueOf(3))).makePipe().asProcessor(); - expectThrows(ArithmeticException.class, () -> proc.process(null)); - } - - // ((3*2+4)/2-2)%2 - public void testTree() { - Expression mul = new Mul(EMPTY, l(3), l(2)); - Expression add = new Add(EMPTY, mul, l(4)); - Expression div = new Div(EMPTY, add, l(2)); - Expression sub = new Sub(EMPTY, div, l(2)); - Mod mod = new Mod(EMPTY, sub, l(2)); - - Processor proc = mod.makePipe().asProcessor(); - assertEquals(1, proc.process(null)); - } - - // ((3*2+4)/2-2)%2 - public void testTreeUnsignedLong() { - Expression mul = new Mul(EMPTY, l(3), l(BigInteger.TWO)); - Expression add = new Add(EMPTY, mul, l(4)); - Expression div = new Div(EMPTY, add, l(2)); - Expression sub = new Sub(EMPTY, div, l(2)); - Mod mod = new Mod(EMPTY, sub, l(2)); - - Processor proc = mod.makePipe().asProcessor(); - assertEquals(BigInteger.ONE, proc.process(null)); - } - - public void testHandleNull() { - assertNull(new Add(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); - assertNull(new Sub(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); - assertNull(new Mul(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); - assertNull(new Div(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); - assertNull(new Mod(EMPTY, l(null), l(3)).makePipe().asProcessor().process(null)); - assertNull(new Neg(EMPTY, l(null)).makePipe().asProcessor().process(null)); - } - - private static Literal l(Object value) { - return TestUtils.of(EMPTY, value); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java deleted file mode 100644 index 22b2702a0ca32..0000000000000 --- 
a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonProcessorTests.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.TestUtils; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; - -import java.math.BigInteger; - -import static org.elasticsearch.xpack.esql.core.TestUtils.equalsOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.greaterThanOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.greaterThanOrEqualOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.lessThanOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.lessThanOrEqualOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.notEqualsOf; -import static org.elasticsearch.xpack.esql.core.TestUtils.nullEqualsOf; -import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; - -public class BinaryComparisonProcessorTests extends AbstractWireSerializingTestCase { - public static BinaryComparisonProcessor randomProcessor() { - return new BinaryComparisonProcessor( - new ConstantProcessor(randomLong()), - new ConstantProcessor(randomLong()), - 
randomFrom(BinaryComparisonProcessor.BinaryComparisonOperation.values()) - ); - } - - @Override - protected BinaryComparisonProcessor createTestInstance() { - return randomProcessor(); - } - - @Override - protected BinaryComparisonProcessor mutateInstance(BinaryComparisonProcessor instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Reader instanceReader() { - return BinaryComparisonProcessor::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - public void testEq() { - assertEquals(true, equalsOf(l(4), l(4)).makePipe().asProcessor().process(null)); - assertEquals(false, equalsOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, equalsOf(l(BigInteger.valueOf(4)), l(4L)).makePipe().asProcessor().process(null)); - assertEquals(false, equalsOf(l(BigInteger.valueOf(3)), l(4L)).makePipe().asProcessor().process(null)); - } - - public void testNullEq() { - assertEquals(true, nullEqualsOf(l(4), l(4)).makePipe().asProcessor().process(null)); - assertEquals(false, nullEqualsOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, nullEqualsOf(NULL, NULL).makePipe().asProcessor().process(null)); - assertEquals(false, nullEqualsOf(l(4), NULL).makePipe().asProcessor().process(null)); - assertEquals(false, nullEqualsOf(NULL, l(4)).makePipe().asProcessor().process(null)); - } - - public void testNEq() { - assertEquals(false, notEqualsOf(l(4), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, notEqualsOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, notEqualsOf(l(BigInteger.valueOf(3)), l(4)).makePipe().asProcessor().process(null)); - } - - public void testGt() { - assertEquals(true, greaterThanOf(l(4), l(3)).makePipe().asProcessor().process(null)); - assertEquals(false, greaterThanOf(l(3), 
l(4)).makePipe().asProcessor().process(null)); - assertEquals(false, greaterThanOf(l(3), l(3)).makePipe().asProcessor().process(null)); - assertEquals(true, greaterThanOf(l(4), l(BigInteger.valueOf(3))).makePipe().asProcessor().process(null)); - } - - public void testGte() { - assertEquals(true, greaterThanOrEqualOf(l(4), l(3)).makePipe().asProcessor().process(null)); - assertEquals(false, greaterThanOrEqualOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, greaterThanOrEqualOf(l(3), l(3)).makePipe().asProcessor().process(null)); - assertEquals(true, greaterThanOrEqualOf(l(BigInteger.valueOf(3)), l(3L)).makePipe().asProcessor().process(null)); - assertEquals(true, greaterThanOrEqualOf(l(BigInteger.valueOf(4)), l(3L)).makePipe().asProcessor().process(null)); - assertEquals(false, greaterThanOrEqualOf(l(BigInteger.valueOf(3)), l(4L)).makePipe().asProcessor().process(null)); - } - - public void testLt() { - assertEquals(false, lessThanOf(l(4), l(3)).makePipe().asProcessor().process(null)); - assertEquals(true, lessThanOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(false, lessThanOf(l(3), l(3)).makePipe().asProcessor().process(null)); - assertEquals(false, lessThanOf(l(3), l(BigInteger.valueOf(3))).makePipe().asProcessor().process(null)); - } - - public void testLte() { - assertEquals(false, lessThanOrEqualOf(l(4), l(3)).makePipe().asProcessor().process(null)); - assertEquals(true, lessThanOrEqualOf(l(3), l(4)).makePipe().asProcessor().process(null)); - assertEquals(true, lessThanOrEqualOf(l(3), l(3)).makePipe().asProcessor().process(null)); - assertEquals(false, lessThanOrEqualOf(l(4), l(BigInteger.valueOf(3))).makePipe().asProcessor().process(null)); - assertEquals(true, lessThanOrEqualOf(l(3), l(BigInteger.valueOf(4))).makePipe().asProcessor().process(null)); - assertEquals(true, lessThanOrEqualOf(l(3), l(BigInteger.valueOf(3))).makePipe().asProcessor().process(null)); - } - - public void testHandleNull() { - 
assertNull(equalsOf(NULL, l(3)).makePipe().asProcessor().process(null)); - assertNull(notEqualsOf(NULL, l(3)).makePipe().asProcessor().process(null)); - assertNull(greaterThanOf(NULL, l(3)).makePipe().asProcessor().process(null)); - assertNull(greaterThanOrEqualOf(NULL, l(3)).makePipe().asProcessor().process(null)); - assertNull(lessThanOf(NULL, l(3)).makePipe().asProcessor().process(null)); - assertNull(lessThanOrEqualOf(NULL, l(3)).makePipe().asProcessor().process(null)); - } - - private static Literal l(Object value) { - return TestUtils.of(EMPTY, value); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessorTests.java deleted file mode 100644 index fa8563b914e11..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessorTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.TestUtils; -import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; - -import java.util.Arrays; - -import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; -import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; - -public class InProcessorTests extends AbstractWireSerializingTestCase { - - private static final Literal ONE = L(1); - private static final Literal TWO = L(2); - private static final Literal THREE = L(3); - - public static InProcessor randomProcessor() { - return new InProcessor(Arrays.asList(new ConstantProcessor(randomLong()), new ConstantProcessor(randomLong()))); - } - - @Override - protected InProcessor createTestInstance() { - return randomProcessor(); - } - - @Override - protected InProcessor mutateInstance(InProcessor instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Reader instanceReader() { - return InProcessor::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - public void testEq() { - assertEquals(true, new In(EMPTY, TWO, Arrays.asList(ONE, TWO, THREE)).makePipe().asProcessor().process(null)); - assertEquals(false, new In(EMPTY, THREE, Arrays.asList(ONE, TWO)).makePipe().asProcessor().process(null)); - } - - public void testHandleNullOnLeftValue() { - assertNull(new In(EMPTY, NULL, Arrays.asList(ONE, TWO, 
THREE)).makePipe().asProcessor().process(null)); - assertNull(new In(EMPTY, NULL, Arrays.asList(ONE, NULL, TWO)).makePipe().asProcessor().process(null)); - } - - public void testHandleNullOnRightValue() { - assertEquals(true, new In(EMPTY, THREE, Arrays.asList(ONE, NULL, THREE)).makePipe().asProcessor().process(null)); - assertNull(new In(EMPTY, TWO, Arrays.asList(ONE, NULL, THREE)).makePipe().asProcessor().process(null)); - } - - private static Literal L(Object value) { - return TestUtils.of(EMPTY, value); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java index 03598d3bc167d..440e48b30536b 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; @@ -1747,50 +1746,6 @@ public void testIsNotNullOnOperatorWithTwoFields() { assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); } - public void testIsNotNullOnFunctionWithOneField() { - EsRelation relation = relation(); - var fieldA = TestUtils.getFieldAttribute("a"); - var pattern = L("abc"); - Expression inn = isNotNull( - new And(EMPTY, new TestStartsWith(EMPTY, fieldA, pattern, false), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE)) - 
); - - Filter f = new Filter(EMPTY, relation, inn); - Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); - - assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); - } - - public void testIsNotNullOnFunctionWithTwoFields() { - EsRelation relation = relation(); - var fieldA = TestUtils.getFieldAttribute("a"); - var fieldB = TestUtils.getFieldAttribute("b"); - var pattern = L("abc"); - Expression inn = isNotNull(new TestStartsWith(EMPTY, fieldA, fieldB, false)); - - Filter f = new Filter(EMPTY, relation, inn); - Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); - - assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); - } - - public static class TestStartsWith extends StartsWith { - - public TestStartsWith(Source source, Expression input, Expression pattern, boolean caseInsensitive) { - super(source, input, pattern, caseInsensitive); - } - - @Override - public Expression replaceChildren(List newChildren) { - return new TestStartsWith(source(), newChildren.get(0), newChildren.get(1), isCaseInsensitive()); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, TestStartsWith::new, input(), pattern(), isCaseInsensitive()); - } - } - public void testIsNotNullOnFunctionWithTwoField() {} private IsNotNull isNotNull(Expression field) { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java index 328e0b3248a4e..80f63b1293e61 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java @@ -17,14 +17,10 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import 
org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.AggExtractorInput; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.BinaryPipesTests; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.InPipe; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.tree.NodeTests.ChildrenAreAProperty; @@ -56,7 +52,6 @@ import java.util.Objects; import java.util.Set; import java.util.function.Predicate; -import java.util.function.Supplier; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; @@ -90,7 +85,7 @@ */ public class NodeSubclassTests> extends ESTestCase { - private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = asList(In.class, InPipe.class); + private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = asList(In.class); private final Class subclass; @@ -374,33 +369,6 @@ private Object makeArg(Class> toBuildClass, Type argType) thro Enum enm = (Enum) makeArg(toBuildClass, pt.getActualTypeArguments()[0]); return EnumSet.of(enm); } - if (pt.getRawType() == Supplier.class) { - if (toBuildClass == AggExtractorInput.class) { - // AggValueInput just needs a valid java type in a supplier - Object o = randomBoolean() ? 
null : randomAlphaOfLength(5); - // But the supplier has to implement equals for randomValueOtherThan - return new Supplier<>() { - @Override - public Object get() { - return o; - } - - @Override - public int hashCode() { - return Objects.hash(o); - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - Supplier other = (Supplier) obj; - return Objects.equals(o, other.get()); - } - }; - } - } Object obj = pluggableMakeParameterizedArg(toBuildClass, pt); if (obj != null) { return obj; @@ -473,14 +441,6 @@ public boolean equals(Object obj) { return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); } - if (Pipe.class == argClass) { - /* - * Similar to expressions, mock pipes to avoid - * stackoverflow errors while building the tree. - */ - return BinaryPipesTests.randomUnaryPipe(); - } - if (Processor.class == argClass) { /* * Similar to expressions, mock pipes to avoid diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index 74191970db896..9b56c2f6bd63f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -113,8 +113,7 @@ public final boolean mayHaveNulls() { return nullsMask != null; } - @Override - public final int nullValuesCount() { + final int nullValuesCount() { return mayHaveNulls() ? 
nullsMask.cardinality() : 0; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index 027eda8eb9be3..fb52cc39f44d2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -31,11 +31,6 @@ public final boolean isNull(int position) { return false; } - @Override - public final int nullValuesCount() { - return 0; - } - @Override public final boolean mayHaveNulls() { return false; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 9a6b701a2e4ea..ca3ce1349c47f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -104,11 +104,6 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R */ boolean isNull(int position); - /** - * @return the number of null values in this block. - */ - int nullValuesCount(); - /** * @return true if some values might be null. False, if all values are guaranteed to be not null. 
*/ @@ -284,7 +279,8 @@ static List getNamedWriteables() { DoubleBlock.ENTRY, BytesRefBlock.ENTRY, BooleanBlock.ENTRY, - ConstantNullBlock.ENTRY + ConstantNullBlock.ENTRY, + CompositeBlock.ENTRY ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 03c1ff05ae99e..7e846bd32e3cb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -272,6 +272,7 @@ private static Object valueAtOffset(Block block, int offset) { DocVector v = ((DocBlock) block).asVector(); yield new Doc(v.shards().getInt(offset), v.segments().getInt(offset), v.docs().getInt(offset)); } + case COMPOSITE -> throw new IllegalArgumentException("can't read values from composite blocks"); case UNKNOWN -> throw new IllegalArgumentException("can't read values from [" + block + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java new file mode 100644 index 0000000000000..c107ea53bd7f4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +public final class CompositeBlock extends AbstractNonThreadSafeRefCounted implements Block { + private final Block[] blocks; + private final int positionCount; + + public CompositeBlock(Block[] blocks) { + if (blocks == null || blocks.length == 0) { + throw new IllegalArgumentException("must have at least one block; got " + Arrays.toString(blocks)); + } + this.blocks = blocks; + this.positionCount = blocks[0].getPositionCount(); + for (Block b : blocks) { + assert b.getPositionCount() == positionCount : "expected positionCount=" + positionCount + " but was " + b; + if (b.getPositionCount() != positionCount) { + assert false : "expected positionCount=" + positionCount + " but was " + b; + throw new IllegalArgumentException("expected positionCount=" + positionCount + " but was " + b); + } + if (b.isReleased()) { + assert false : "can't build composite block out of released blocks but [" + b + "] was released"; + throw new IllegalArgumentException("can't build composite block out of released blocks but [" + b + "] was released"); + } + } + } + + static NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "CompositeBlock", CompositeBlock::readFrom); + + @Override + public Vector asVector() { + return null; + } + + /** + * Returns the block at the given block index. 
+ */ + public B getBlock(int blockIndex) { + @SuppressWarnings("unchecked") + B block = (B) blocks[blockIndex]; + return block; + } + + /** + * Returns the number of blocks in this composite block. + */ + public int getBlockCount() { + return blocks.length; + } + + @Override + public boolean mvSortedAscending() { + return Arrays.stream(blocks).allMatch(Block::mvSortedAscending); + } + + @Override + public boolean mvDeduplicated() { + return Arrays.stream(blocks).allMatch(Block::mvDeduplicated); + } + + @Override + public int getPositionCount() { + return positionCount; + } + + @Override + public int getTotalValueCount() { + throw new UnsupportedOperationException("Composite block"); + } + + @Override + public int getFirstValueIndex(int position) { + throw new UnsupportedOperationException("Composite block"); + } + + @Override + public int getValueCount(int position) { + throw new UnsupportedOperationException("Composite block"); + } + + @Override + public boolean isNull(int position) { + throw new UnsupportedOperationException("Composite block"); + } + + @Override + public ElementType elementType() { + return ElementType.COMPOSITE; + } + + @Override + public BlockFactory blockFactory() { + return blocks[0].blockFactory(); + } + + @Override + public void allowPassingToDifferentDriver() { + for (Block block : blocks) { + block.allowPassingToDifferentDriver(); + } + } + + @Override + public boolean mayHaveNulls() { + return Arrays.stream(blocks).anyMatch(Block::mayHaveNulls); + } + + @Override + public boolean areAllValuesNull() { + return Arrays.stream(blocks).allMatch(Block::areAllValuesNull); + } + + @Override + public boolean mayHaveMultivaluedFields() { + return Arrays.stream(blocks).anyMatch(Block::mayHaveMultivaluedFields); + } + + @Override + public CompositeBlock filter(int... 
positions) { + CompositeBlock result = null; + final Block[] filteredBlocks = new Block[blocks.length]; + try { + for (int i = 0; i < blocks.length; i++) { + filteredBlocks[i] = blocks[i].filter(positions); + } + result = new CompositeBlock(filteredBlocks); + return result; + } finally { + if (result == null) { + Releasables.close(filteredBlocks); + } + } + } + + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + // TODO: support this + throw new UnsupportedOperationException("can't lookup values from CompositeBlock"); + } + + @Override + public MvOrdering mvOrdering() { + return MvOrdering.UNORDERED; + } + + @Override + public CompositeBlock expand() { + throw new UnsupportedOperationException("CompositeBlock"); + } + + @Override + public long ramBytesUsed() { + return Arrays.stream(blocks).mapToLong(Accountable::ramBytesUsed).sum(); + } + + @Override + public String getWriteableName() { + return "CompositeBlock"; + } + + static Block readFrom(StreamInput in) throws IOException { + final int numBlocks = in.readVInt(); + boolean success = false; + final Block[] blocks = new Block[numBlocks]; + try { + for (int b = 0; b < numBlocks; b++) { + blocks[b] = in.readNamedWriteable(Block.class); + } + CompositeBlock result = new CompositeBlock(blocks); + success = true; + return result; + } finally { + if (success == false) { + Releasables.close(blocks); + } + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(blocks.length); + for (Block block : blocks) { + out.writeNamedWriteable(block); + } + } + + @Override + protected void closeInternal() { + Releasables.close(blocks); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CompositeBlock that = (CompositeBlock) o; + return positionCount == that.positionCount && Objects.deepEquals(blocks, that.blocks); + } + + @Override + public int 
hashCode() { + return Objects.hash(Arrays.hashCode(blocks), positionCount); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 3b08f46e6e7fc..ae14033a00b3e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -52,11 +52,6 @@ public boolean isNull(int position) { return true; } - @Override - public int nullValuesCount() { - return getPositionCount(); - } - @Override public boolean areAllValuesNull() { return true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 2f7d65c8719e6..5796153748817 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -29,6 +29,11 @@ public enum ElementType { */ DOC(DocBlock::newBlockBuilder), + /** + * Composite blocks which contain array of sub-blocks. + */ + COMPOSITE((blockFactory, estimatedSize) -> { throw new UnsupportedOperationException("can't build composite blocks"); }), + /** * Intermediate blocks which don't support retrieving elements. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java index 947d0daded40d..321c319f06671 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java @@ -176,11 +176,6 @@ public boolean isNull(int position) { return ordinals.isNull(position); } - @Override - public int nullValuesCount() { - return ordinals.nullValuesCount(); - } - @Override public boolean mayHaveNulls() { return ordinals.mayHaveNulls(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 2537809fbd8ec..785db826aadd6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -361,11 +361,11 @@ private static void schedule( @Override protected void doRun() { + SubscribableListener fut = driver.run(maxTime, maxIterations, System::nanoTime); if (driver.isFinished()) { onComplete(listener); return; } - SubscribableListener fut = driver.run(maxTime, maxIterations, System::nanoTime); if (fut.isDone()) { schedule(maxTime, maxIterations, threadContext, executor, driver, listener); } else { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java index bb8d3fd269a8a..23639109915e2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -147,7 +147,7 @@ static List valuesAggregatorForGroupings(List new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); - case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + case NULL, DOC, COMPOSITE, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); }); aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 499f552cca816..259d4f1249d69 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -1305,7 +1305,9 @@ public void add(int positionOffset, IntVector groupIds) { try (ReleasableIterator lookup = blockHash.lookup(new Page(keys), ByteSizeValue.ofKb(between(1, 100)))) { while (lookup.hasNext()) { try (IntBlock ords = lookup.next()) { - assertThat(ords.nullValuesCount(), equalTo(0)); + for (int p = 0; p < ords.getPositionCount(); p++) { + assertFalse(ords.isNull(p)); + } } } } finally { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 1d58d49c92828..ac0ad5f9fbd99 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -166,7 +166,6 @@ static void assertSingleValueDenseBlock(Block initialBlock) { assertThat(block.asVector().getPositionCount(), is(positionCount)); assertThat(block.asVector().asBlock().getTotalValueCount(), is(positionCount)); assertThat(block.asVector().asBlock().getPositionCount(), is(positionCount)); - assertThat(block.nullValuesCount(), is(0)); assertThat(block.mayHaveNulls(), is(false)); assertThat(block.areAllValuesNull(), is(false)); assertThat(block.mayHaveMultivaluedFields(), is(false)); @@ -823,7 +822,6 @@ public void testSingleValueSparseInt() { assertThat(block.getInt(i), is(values[i])); } } - assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); block.close(); } @@ -857,7 +855,6 @@ public void testSingleValueSparseLong() { assertThat(block.getLong(i), is(values[i])); } } - assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); block.close(); } @@ -891,7 +888,6 @@ public void testSingleValueSparseDouble() { assertThat(block.getDouble(i), is(values[i])); } } - assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); block.close(); } @@ -925,7 +921,6 @@ public void testSingleValueSparseBoolean() { assertThat(block.getBoolean(i), is(values[i])); } } - assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? 
is(nullValue()) : is(notNullValue())); block.close(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java index e3a9aba0d1b7f..dc12a78954c5e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java @@ -22,13 +22,13 @@ public class BlockBuilderCopyFromTests extends ESTestCase { @ParametersFactory public static List params() { List params = new ArrayList<>(); - for (ElementType elementType : ElementType.values()) { - if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + for (ElementType e : ElementType.values()) { + if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { continue; } for (boolean nullAllowed : new boolean[] { false, true }) { for (int[] valuesPerPosition : new int[][] { new int[] { 1, 1 }, new int[] { 1, 10 } }) { // TODO 0 - params.add(new Object[] { elementType, nullAllowed, valuesPerPosition[0], valuesPerPosition[1] }); + params.add(new Object[] { e, nullAllowed, valuesPerPosition[0], valuesPerPosition[1] }); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index 6b5c37ee26888..eb2c750e3b2d7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -31,11 +31,11 @@ public class BlockBuilderTests extends ESTestCase { @ParametersFactory public static List params() { List params = new 
ArrayList<>(); - for (ElementType elementType : ElementType.values()) { - if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + for (ElementType e : ElementType.values()) { + if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { continue; } - params.add(new Object[] { elementType }); + params.add(new Object[] { e }); } return params; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java index 4579eb688d95e..89e44a1763b0f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java @@ -36,12 +36,12 @@ public class BlockMultiValuedTests extends ESTestCase { @ParametersFactory public static List params() { List params = new ArrayList<>(); - for (ElementType elementType : ElementType.values()) { - if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + for (ElementType e : ElementType.values()) { + if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { continue; } for (boolean nullAllowed : new boolean[] { false, true }) { - params.add(new Object[] { elementType, nullAllowed }); + params.add(new Object[] { e, nullAllowed }); } } return params; @@ -169,7 +169,6 @@ private int[] randomFilterPositions(Block orig, boolean all, boolean shuffled) { private void assertExpanded(Block orig) { try (orig; Block expanded = orig.expand()) { - assertThat(expanded.getPositionCount(), equalTo(orig.getTotalValueCount() + orig.nullValuesCount())); assertThat(expanded.getTotalValueCount(), equalTo(orig.getTotalValueCount())); int np = 0; diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 0dfb72274b9d9..2daf7755841f7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -26,6 +26,7 @@ import java.util.List; import java.util.stream.IntStream; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class BlockSerializationTests extends SerializationTestCase { @@ -336,6 +337,30 @@ public void testOrdinalBlock() throws Exception { } } + public void testCompositeBlock() throws Exception { + final int numBlocks = randomIntBetween(1, 10); + final int positionCount = randomIntBetween(1, 1000); + final Block[] blocks = new Block[numBlocks]; + for (int b = 0; b < numBlocks; b++) { + ElementType elementType = randomFrom(ElementType.LONG, ElementType.DOUBLE, ElementType.BOOLEAN, ElementType.NULL); + blocks[b] = BasicBlockTests.randomBlock(blockFactory, elementType, positionCount, true, 0, between(1, 2), 0, between(1, 2)) + .block(); + } + try (CompositeBlock origBlock = new CompositeBlock(blocks)) { + assertThat(origBlock.getBlockCount(), equalTo(numBlocks)); + for (int b = 0; b < numBlocks; b++) { + assertThat(origBlock.getBlock(b), equalTo(blocks[b])); + } + try (CompositeBlock deserBlock = serializeDeserializeBlock(origBlock)) { + assertThat(deserBlock.getBlockCount(), equalTo(numBlocks)); + for (int b = 0; b < numBlocks; b++) { + assertThat(deserBlock.getBlock(b), equalTo(origBlock.getBlock(b))); + } + EqualsHashCodeTestUtils.checkEqualsAndHashCode(deserBlock, unused -> deserBlock); + } + } + } + static BytesRef randomBytesRef() { return new BytesRef(randomAlphaOfLengthBetween(0, 10)); } diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java index e42e2b47e4e99..b02ef6d8e9589 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java @@ -36,6 +36,7 @@ public static Object randomValue(ElementType e) { case BOOLEAN -> randomBoolean(); case DOC -> new BlockUtils.Doc(randomInt(), randomInt(), between(0, Integer.MAX_VALUE)); case NULL -> null; + case COMPOSITE -> throw new IllegalArgumentException("can't make random values for composite"); case UNKNOWN -> throw new IllegalArgumentException("can't make random values for [" + e + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java new file mode 100644 index 0000000000000..8df2e27827b48 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.compute.operator.ComputeTestCase; + +import java.util.Arrays; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class CompositeBlockTests extends ComputeTestCase { + + static List supportedSubElementTypes = Arrays.stream(ElementType.values()) + .filter(e -> e != ElementType.COMPOSITE && e != ElementType.UNKNOWN && e != ElementType.DOC) + .toList(); + + public static CompositeBlock randomCompositeBlock(BlockFactory blockFactory, int numBlocks, int positionCount) { + Block[] blocks = new Block[numBlocks]; + for (int b = 0; b < numBlocks; b++) { + ElementType elementType = randomFrom(supportedSubElementTypes); + blocks[b] = BasicBlockTests.randomBlock( + blockFactory, + elementType, + positionCount, + elementType == ElementType.NULL || randomBoolean(), + 0, + between(1, 2), + 0, + between(1, 2) + ).block(); + } + return new CompositeBlock(blocks); + } + + public void testFilter() { + final BlockFactory blockFactory = blockFactory(); + int numBlocks = randomIntBetween(1, 1000); + int positionCount = randomIntBetween(1, 1000); + try (CompositeBlock origComposite = randomCompositeBlock(blockFactory, numBlocks, positionCount)) { + int[] selected = new int[randomIntBetween(0, positionCount * 3)]; + for (int i = 0; i < selected.length; i++) { + selected[i] = randomIntBetween(0, positionCount - 1); + } + try (CompositeBlock filteredComposite = origComposite.filter(selected)) { + assertThat(filteredComposite.getBlockCount(), equalTo(numBlocks)); + assertThat(filteredComposite.getPositionCount(), equalTo(selected.length)); + for (int b = 0; b < numBlocks; b++) { + try (Block filteredSub = origComposite.getBlock(b).filter(selected)) { + assertThat(filteredComposite.getBlock(b), equalTo(filteredSub)); + } + } + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java index dc78b3715d12a..b57819383bfbd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java @@ -133,7 +133,6 @@ public void testFilterOnNull() { assertTrue(filtered.isNull(0)); assertTrue(filtered.mayHaveNulls()); assertFalse(filtered.areAllValuesNull()); - assertEquals(1, filtered.nullValuesCount()); assertEquals(2, filtered.getTotalValueCount()); assertFalse(filtered.isNull(1)); assertEquals(30, filtered.getInt(filtered.getFirstValueIndex(1))); @@ -161,7 +160,6 @@ public void testFilterOnAllNullsBlock() { assertTrue(filtered.isNull(0)); assertTrue(filtered.mayHaveNulls()); assertTrue(filtered.areAllValuesNull()); - assertEquals(3, filtered.nullValuesCount()); assertEquals(0, filtered.getTotalValueCount()); block.close(); releaseAndAssertBreaker(filtered); @@ -184,7 +182,6 @@ public void testFilterOnNoNullsBlock() { assertFalse(filtered.isNull(0)); assertFalse(filtered.mayHaveNulls()); assertFalse(filtered.areAllValuesNull()); - assertEquals(0, filtered.nullValuesCount()); assertEquals(3, filtered.getTotalValueCount()); assertEquals(20, filtered.asVector().getInt(0)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java index 096db174a2580..d41ccc26bfe49 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -28,11 +28,11 @@ public class VectorBuilderTests extends ESTestCase { @ParametersFactory public static List params() { List params = new ArrayList<>(); - for (ElementType elementType : 
ElementType.values()) { - if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + for (ElementType e : ElementType.values()) { + if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { continue; } - params.add(new Object[] { elementType }); + params.add(new Object[] { e }); } return params; } @@ -113,7 +113,7 @@ public void testCranky() { private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) { return switch (elementType) { - case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case NULL, DOC, COMPOSITE, UNKNOWN -> throw new UnsupportedOperationException(); case BOOLEAN -> blockFactory.newBooleanVectorBuilder(estimatedSize); case BYTES_REF -> blockFactory.newBytesRefVectorBuilder(estimatedSize); case DOUBLE -> blockFactory.newDoubleVectorBuilder(estimatedSize); @@ -124,7 +124,7 @@ private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactor private void fill(Vector.Builder builder, Vector from) { switch (elementType) { - case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case NULL, DOC, COMPOSITE, UNKNOWN -> throw new UnsupportedOperationException(); case BOOLEAN -> { for (int p = 0; p < from.getPositionCount(); p++) { ((BooleanVector.Builder) builder).appendBoolean(((BooleanVector) from).getBoolean(p)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index cdfc7611ec678..f6b1acf4131d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -29,6 +29,7 @@ public static List params() { List params = new ArrayList<>(); for (ElementType 
elementType : ElementType.values()) { if (elementType == ElementType.UNKNOWN + || elementType == ElementType.COMPOSITE || elementType == ElementType.NULL || elementType == ElementType.DOC || elementType == ElementType.BYTES_REF) { @@ -115,7 +116,7 @@ public void testCranky() { private Vector.Builder vectorBuilder(int size, BlockFactory blockFactory) { return switch (elementType) { - case NULL, BYTES_REF, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case NULL, BYTES_REF, DOC, COMPOSITE, UNKNOWN -> throw new UnsupportedOperationException(); case BOOLEAN -> blockFactory.newBooleanVectorFixedBuilder(size); case DOUBLE -> blockFactory.newDoubleVectorFixedBuilder(size); case INT -> blockFactory.newIntVectorFixedBuilder(size); @@ -125,7 +126,7 @@ private Vector.Builder vectorBuilder(int size, BlockFactory blockFactory) { private void fill(Vector.Builder builder, Vector from) { switch (elementType) { - case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case NULL, DOC, COMPOSITE, UNKNOWN -> throw new UnsupportedOperationException(); case BOOLEAN -> { for (int p = 0; p < from.getPositionCount(); p++) { ((BooleanVector.FixedBuilder) builder).appendBoolean(((BooleanVector) from).getBoolean(p)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 5d09af3afea17..8200529e18290 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -134,11 +134,11 @@ Block randomBlock(BlockFactory blockFactory, int size) { static ElementType randomElement() { List l = new ArrayList<>(); - for (ElementType elementType : ElementType.values()) { - if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType 
== ElementType.DOC) { + for (ElementType e : ElementType.values()) { + if (e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE) { continue; } - l.add(elementType); + l.add(e); } return randomFrom(l); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java index 0ea921ac15e78..dfa49ac134430 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java @@ -57,7 +57,7 @@ public class MultivalueDedupeTests extends ESTestCase { public static List supportedTypes() { List supported = new ArrayList<>(); for (ElementType elementType : ElementType.values()) { - if (oneOf(elementType, ElementType.UNKNOWN, ElementType.DOC)) { + if (oneOf(elementType, ElementType.UNKNOWN, ElementType.DOC, ElementType.COMPOSITE)) { continue; } supported.add(elementType); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index 24b682d67127d..40c6074fc7d3a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -40,6 +40,9 @@ public static Iterable parameters() { switch (e) { case UNKNOWN -> { } + case COMPOSITE -> { + // TODO: add later + } case BYTES_REF -> { cases.add(valueTestCase("single alpha", e, TopNEncoder.UTF8, () -> randomAlphaOfLength(5))); cases.add(valueTestCase("many alpha", e, TopNEncoder.UTF8, () -> randomList(2, 10, () -> 
randomAlphaOfLength(5)))); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 88c29c7492e22..b2195f205c93b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -65,6 +65,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.compute.data.ElementType.BOOLEAN; import static org.elasticsearch.compute.data.ElementType.BYTES_REF; +import static org.elasticsearch.compute.data.ElementType.COMPOSITE; import static org.elasticsearch.compute.data.ElementType.DOUBLE; import static org.elasticsearch.compute.data.ElementType.INT; import static org.elasticsearch.compute.data.ElementType.LONG; @@ -503,7 +504,7 @@ public void testCollectAllValues() { encoders.add(DEFAULT_SORTABLE); for (ElementType e : ElementType.values()) { - if (e == ElementType.UNKNOWN) { + if (e == ElementType.UNKNOWN || e == COMPOSITE) { continue; } elementTypes.add(e); @@ -575,7 +576,7 @@ public void testCollectAllValues_RandomMultiValues() { for (int type = 0; type < blocksCount; type++) { ElementType e = randomFrom(ElementType.values()); - if (e == ElementType.UNKNOWN) { + if (e == ElementType.UNKNOWN || e == COMPOSITE) { continue; } elementTypes.add(e); @@ -963,7 +964,7 @@ public void testRandomMultiValuesTopN() { for (int type = 0; type < blocksCount; type++) { ElementType e = randomValueOtherThanMany( - t -> t == ElementType.UNKNOWN || t == ElementType.DOC, + t -> t == ElementType.UNKNOWN || t == ElementType.DOC || t == COMPOSITE, () -> randomFrom(ElementType.values()) ); elementTypes.add(e); diff --git 
a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java index c7e9c3994ee4b..544eb82fb5ace 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -90,7 +90,6 @@ private Response runAsync(String user, String command) throws IOException { } XContentBuilder json = JsonXContent.contentBuilder(); json.startObject(); - json.field("version", ESQL_VERSION); json.field("query", command); addRandomPragmas(json); json.field("wait_for_completion_timeout", timeValueNanos(randomIntBetween(1, 1000))); diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 41df233af6459..7f0d9b9170c5e 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -38,8 +38,6 @@ import static org.hamcrest.Matchers.equalTo; public class EsqlSecurityIT extends ESRestTestCase { - static String ESQL_VERSION = "2024.04.01.🚀"; - @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) @@ -355,7 +353,6 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio } XContentBuilder json = JsonXContent.contentBuilder(); json.startObject(); - json.field("version", ESQL_VERSION); json.field("query", command); addRandomPragmas(json); json.endObject(); diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle 
b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 1b23228f33575..e4223f03c3a03 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -30,23 +30,16 @@ def supportedVersion = bwcVersion -> { return bwcVersion.onOrAfter(Version.fromString("8.11.0")); } -// Versions on and after 8.13.3 will get a `version` parameter -def versionUnsupported = bwcVersion -> { - return bwcVersion.before(Version.fromString("8.13.3")); -} - BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> def javaRestTest = tasks.register("v${bwcVersion}#javaRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) - systemProperty("tests.version_parameter_unsupported", versionUnsupported(bwcVersion)) maxParallelForks = 1 } def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) - systemProperty("tests.version_parameter_unsupported", versionUnsupported(bwcVersion)) testClassesDirs = sourceSets.yamlRestTest.output.classesDirs classpath = sourceSets.yamlRestTest.runtimeClasspath } diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java index 9bb114aaa6f6c..2c9833ba0793e 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java @@ -9,28 +9,14 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.http.HttpHost; -import 
org.elasticsearch.client.RestClient; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import org.elasticsearch.test.rest.yaml.ImpersonateOfficialClientTestClient; -import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; -import org.elasticsearch.test.rest.yaml.section.ApiCallSection; -import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; -import org.elasticsearch.test.rest.yaml.section.DoSection; -import org.elasticsearch.test.rest.yaml.section.ExecutableSection; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.function.Function; - public class EsqlClientYamlIT extends ESClientYamlSuiteTestCase { @ClassRule public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster(); @@ -46,9 +32,6 @@ public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) { @ParametersFactory public static Iterable parameters() throws Exception { - if (EsqlSpecTestCase.availableVersions().isEmpty()) { - return updateEsqlQueryDoSections(createParameters(), EsqlClientYamlIT::stripVersion); - } return createParameters(); } @@ -57,63 +40,4 @@ public static Iterable parameters() throws Exception { public void assertRequestBreakerEmpty() throws Exception { EsqlSpecTestCase.assertRequestBreakerEmpty(); } - - @Override - protected ClientYamlTestClient initClientYamlTestClient( - final ClientYamlSuiteRestSpec restSpec, - final RestClient restClient, - final List hosts - ) { - if (EsqlSpecTestCase.availableVersions().isEmpty()) { - return new ImpersonateOfficialClientTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts, "es=8.13"); - } 
- return super.initClientYamlTestClient(restSpec, restClient, hosts); - } - - static DoSection stripVersion(DoSection doSection) { - ApiCallSection copy = doSection.getApiCallSection().copyWithNewApi(doSection.getApiCallSection().getApi()); - for (Map body : copy.getBodies()) { - body.remove("version"); - } - doSection.setApiCallSection(copy); - return doSection; - } - - // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt - public static Iterable updateEsqlQueryDoSections(Iterable parameters, Function modify) - throws Exception { - List result = new ArrayList<>(); - for (Object[] orig : parameters) { - assert orig.length == 1; - ClientYamlTestCandidate candidate = (ClientYamlTestCandidate) orig[0]; - try { - ClientYamlTestSection modified = new ClientYamlTestSection( - candidate.getTestSection().getLocation(), - candidate.getTestSection().getName(), - candidate.getTestSection().getPrerequisiteSection(), - candidate.getTestSection().getExecutableSections().stream().map(e -> modifyExecutableSection(e, modify)).toList() - ); - result.add(new Object[] { new ClientYamlTestCandidate(candidate.getRestTestSuite(), modified) }); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("error modifying " + candidate + ": " + e.getMessage(), e); - } - } - return result; - } - - // TODO: refactor, copied from single-node's AbstractEsqlClientYamlIt - private static ExecutableSection modifyExecutableSection(ExecutableSection e, Function modify) { - if (false == (e instanceof DoSection)) { - return e; - } - DoSection doSection = (DoSection) e; - String api = doSection.getApiCallSection().getApi(); - return switch (api) { - case "esql.query" -> modify.apply(doSection); - // case "esql.async_query", "esql.async_query_get" -> throw new IllegalArgumentException( - // "The esql yaml tests can't contain async_query or async_query_get because we modify them on the fly and *add* those." 
- // ); - default -> e; - }; - } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 9299e6abdafe1..9dae850d6f349 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.TestFeatureService; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; import org.junit.After; import org.junit.Before; @@ -122,9 +121,7 @@ void indexDocs(RestClient client, String index, List docs) throws IOExcepti } private Map run(String query) throws IOException { - Map resp = runEsql( - new RestEsqlTestCase.RequestObjectBuilder().query(query).version(EsqlTestUtils.latestEsqlVersionOrSnapshot()).build() - ); + Map resp = runEsql(new RestEsqlTestCase.RequestObjectBuilder().query(query).build()); logger.info("--> query {} response {}", query, resp); return resp; } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 3847ec7b74cae..587b503d1f8f5 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -11,7 +11,6 @@ import org.apache.http.HttpEntity; import org.apache.lucene.tests.util.TimeUnits; -import org.elasticsearch.Build; 
import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; @@ -32,7 +31,6 @@ import org.elasticsearch.xpack.esql.core.SpecReader; import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -43,7 +41,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -79,13 +76,6 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { protected final CsvTestCase testCase; protected final Mode mode; - public static Set availableVersions() { - if ("true".equals(System.getProperty("tests.version_parameter_unsupported"))) { - return Set.of(); - } - return Build.current().isSnapshot() ? Set.of(EsqlVersion.values()) : Set.of(EsqlVersion.releasedAscending()); - } - public enum Mode { SYNC, ASYNC @@ -202,15 +192,8 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - String versionString = null; - // TODO: Read version range from csv-spec and skip if none of the versions are available. - if (availableVersions().isEmpty() == false) { - EsqlVersion version = randomFrom(availableVersions()); - versionString = randomBoolean() ? 
version.toString() : version.versionStringWithoutEmoji(); - } - Map answer = runEsql( - builder.query(testCase.query).version(versionString), + builder.query(testCase.query), testCase.expectedWarnings(false), testCase.expectedWarningsRegex() ); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index c7bb9d293e708..085bc7a22f185 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -26,7 +26,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.hamcrest.Matcher; import org.junit.Before; @@ -1435,14 +1434,7 @@ private String deyaml(String err) { } private static Map runEsql(String query) throws IOException { - // Use the latest released version or SNAPSHOT, if available. 
- String versionString = EsqlSpecTestCase.availableVersions() - .stream() - .max(Comparator.comparingInt(EsqlVersion::id)) - .map(EsqlVersion::toString) - .orElse(null); - - return runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query(query).version(versionString)); + return runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query(query)); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java index 07abc26e8c789..759541a9ab5d1 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -13,7 +13,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.junit.After; import org.junit.Before; @@ -179,7 +178,7 @@ private Map runEsql(String query) throws IOException { } private Map runEsql(String query, Mode mode) throws IOException { - var requestObject = new RestEsqlTestCase.RequestObjectBuilder().query(query).version(EsqlTestUtils.latestEsqlVersionOrSnapshot()); + var requestObject = new RestEsqlTestCase.RequestObjectBuilder().query(query); if (mode == Mode.ASYNC) { return RestEsqlTestCase.runEsqlAsync(requestObject); } else { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 40c549f336ef6..3f21c9da31861 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ 
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -112,7 +112,6 @@ protected RestEsqlTestCase(Mode mode) { public static class RequestObjectBuilder { private final XContentBuilder builder; private boolean isBuilt = false; - private String version; private Boolean keepOnCompletion = null; @@ -130,11 +129,6 @@ public RequestObjectBuilder query(String query) throws IOException { return this; } - public RequestObjectBuilder version(String version) throws IOException { - this.version = version; - return this; - } - public RequestObjectBuilder columnar(boolean columnar) throws IOException { builder.field("columnar", columnar); return this; @@ -179,9 +173,6 @@ public RequestObjectBuilder pragmas(Settings pragmas) throws IOException { public RequestObjectBuilder build() throws IOException { if (isBuilt == false) { - if (version != null) { - builder.field("version", version); - } builder.endObject(); isBuilt = true; } @@ -622,11 +613,6 @@ public static Map runEsqlSync( options.setWarningsHandler(WarningsHandler.PERMISSIVE); // We assert the warnings ourselves options.addHeader("Content-Type", mediaType); - if (EsqlSpecTestCase.availableVersions().isEmpty()) { - // Masquerade as an old version of the official client, so we get the oldest version by default - options.addHeader("x-elastic-client-meta", "es=8.13"); - } - if (randomBoolean()) { options.addHeader("Accept", mediaType); } else { @@ -651,10 +637,6 @@ public static Map runEsqlAsync( RequestOptions.Builder options = request.getOptions().toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE); // We assert the warnings ourselves options.addHeader("Content-Type", mediaType); - if ("true".equals(System.getProperty("tests.version_parameter_unsupported"))) { - // Masquerade as an old version of the official client, so we get the oldest version by default - options.addHeader("x-elastic-client-meta", "es=8.13"); - } if (randomBoolean()) { 
options.addHeader("Accept", mediaType); @@ -937,12 +919,8 @@ private static String repeatValueAsMV(Object value) { return "[" + value + ", " + value + "]"; } - public static RequestObjectBuilder requestObjectBuilder(String version) throws IOException { - return new RequestObjectBuilder().version(version); - } - public static RequestObjectBuilder requestObjectBuilder() throws IOException { - return requestObjectBuilder(EsqlTestUtils.latestEsqlVersionOrSnapshot()); + return new RequestObjectBuilder(); } @After diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 8cf5f6a7cf841..63c184e973cde 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -12,7 +12,6 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.esql.CsvTestsDataLoader; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.junit.AfterClass; import org.junit.Before; @@ -98,9 +97,7 @@ private void checkException(EsqlQueryGenerator.QueryExecuted query) { private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { try { - Map a = RestEsqlTestCase.runEsqlSync( - new RestEsqlTestCase.RequestObjectBuilder().query(command).version(EsqlVersion.ROCKET.toString()).build() - ); + Map a = RestEsqlTestCase.runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query(command).build()); List outputSchema = outputSchema(a); return new EsqlQueryGenerator.QueryExecuted(command, depth, outputSchema, null); } catch (Exception e) { diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 9c1b4de803bcb..1927cfd03ac06 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -502,6 +502,7 @@ public static Type asType(ElementType elementType, Type actualType) { case BYTES_REF -> bytesRefBlockType(actualType); case BOOLEAN -> BOOLEAN; case DOC -> throw new IllegalArgumentException("can't assert on doc blocks"); + case COMPOSITE -> throw new IllegalArgumentException("can't assert on composite blocks"); case UNKNOWN -> throw new IllegalArgumentException("Unknown block types cannot be handled"); }; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index be93ac0ed3016..820d62eb50e37 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Build; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.settings.Settings; @@ -40,7 +39,6 @@ import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import org.junit.Assert; import java.io.IOException; @@ -65,11 +63,6 @@ import static org.junit.Assert.assertTrue; public final 
class EsqlTestUtils { - public static String latestEsqlVersionOrSnapshot() { - EsqlVersion version = Build.current().isSnapshot() ? EsqlVersion.SNAPSHOT : EsqlVersion.latestReleased(); - return version.toString(); - } - public static class TestSearchStats extends SearchStats { public TestSearchStats() { super(emptyList()); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 04a752e79b2f4..22e3de8499bc1 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -25,7 +25,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.plugin.TransportEsqlQueryAction; @@ -40,22 +39,6 @@ @TestLogging(value = "org.elasticsearch.xpack.esql.session:DEBUG", reason = "to better understand planning") public abstract class AbstractEsqlIntegTestCase extends ESIntegTestCase { - public static EsqlQueryRequest asyncSyncRequestOnLatestVersion() { - EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest(); - applyLatestVersion(request); - return request; - } - - public static EsqlQueryRequest syncRequestOnLatestVersion() { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - applyLatestVersion(request); - return request; - } - - private static void applyLatestVersion(EsqlQueryRequest request) { - request.esqlVersion(EsqlTestUtils.latestEsqlVersionOrSnapshot()); - } - @After public 
void ensureExchangesAreReleased() throws Exception { for (String node : internalCluster().getNodeNames()) { @@ -145,23 +128,16 @@ protected void setRequestCircuitBreakerLimit(ByteSizeValue limit) { } } - protected EsqlQueryResponse run(String esqlCommands) { + protected final EsqlQueryResponse run(String esqlCommands) { return run(esqlCommands, randomPragmas()); } - protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas) { + protected final EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas) { return run(esqlCommands, pragmas, null); } protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) { - return run(esqlCommands, pragmas, filter, null); - } - - protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter, String version) { - EsqlQueryRequest request = syncRequestOnLatestVersion(); - if (version != null) { - request.esqlVersion(version); - } + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query(esqlCommands); if (pragmas != null) { request.pragmas(pragmas); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 736a20b367b71..800067fef8b1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -159,7 +159,7 @@ private void createRemoteIndex(int numDocs) throws Exception { public void testCancel() throws Exception { createRemoteIndex(between(10, 100)); - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM *:test | 
STATS total=sum(const) | LIMIT 1"); request.pragmas(randomPragmas()); PlainActionFuture requestFuture = new PlainActionFuture<>(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 77fc6987e07c3..12708fb626c36 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -457,7 +457,7 @@ public void testEnrichCoordinatorThenEnrichRemote() { } protected EsqlQueryResponse runQuery(String query) { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); if (randomBoolean()) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index 9021a10562124..8d1d81795bf46 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -156,7 +156,7 @@ public void testProfile() { waitForNoInitializingShards(client(REMOTE_CLUSTER), TimeValue.timeValueSeconds(30), "logs-2"); final int localOnlyProfiles; { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true); @@ -171,7 +171,7 @@ public void 
testProfile() { } final int remoteOnlyProfiles; { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM *:logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true); @@ -186,7 +186,7 @@ public void testProfile() { } final int allProfiles; { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM logs*,*:logs* | stats total = sum(v)"); request.pragmas(pragmas); request.profile(true); @@ -203,7 +203,7 @@ public void testProfile() { } public void testWarnings() throws Exception { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); PlainActionFuture future = new PlainActionFuture<>(); InternalTestCluster cluster = cluster(LOCAL_CLUSTER); @@ -229,7 +229,7 @@ public void testWarnings() throws Exception { } protected EsqlQueryResponse runQuery(String query) { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query(query); request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); return runQuery(request); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index c4adfb6885267..1298e3374665b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -328,7 +328,7 @@ public void testTopN() { } public 
void testProfile() { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.pragmas(randomPragmas()); request.query("from listens* | sort timestamp DESC | limit 1 | " + enrichSongCommand() + " | KEEP timestamp, artist"); request.profile(true); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index f16f5808da89f..089cb4a9a5084 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -130,7 +130,7 @@ public void testBreaker() { setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 512))); try { final ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> { - var request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + var request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("from test_breaker | stats count_distinct(foo) by bar"); request.pragmas(randomPragmas()); try (var ignored = client().execute(EsqlQueryAction.INSTANCE, request).actionGet(2, TimeUnit.MINUTES)) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index d18bf0e23fd29..d3471450e4728 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -368,7 +368,7 @@ protected void doRun() throws Exception { try { 
scriptPermits.release(numberOfDocs()); // do not block Lucene operators Client client = client(coordinator); - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); client().admin() .indices() .prepareUpdateSettings("test") diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index 27edadb25ab26..e2e635917ed1c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -52,11 +52,8 @@ protected Collection> nodePlugins() { } @Override - protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter, String version) { - EsqlQueryRequest request = AbstractEsqlIntegTestCase.asyncSyncRequestOnLatestVersion(); - if (version != null) { - request.esqlVersion(version); - } + protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) { + EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest(); request.query(esqlCommands); request.pragmas(pragmas); // deliberately small timeout, to frequently trigger incomplete response diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java index 445ca0414ed88..5b2425f18d62b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java @@ -68,7 +68,7 @@ public void testCollectWarnings() throws Exception { 
DiscoveryNode coordinator = randomFrom(clusterService().state().nodes().stream().toList()); client().admin().indices().prepareRefresh("index-1", "index-2").get(); - EsqlQueryRequest request = AbstractEsqlIntegTestCase.syncRequestOnLatestVersion(); + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); request.query("FROM index-* | EVAL ip = to_ip(host) | STATS s = COUNT(*) by ip | KEEP ip | LIMIT 100"); request.pragmas(randomPragmas()); CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index 08d10e5ca7763..542b220d398d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import java.io.IOException; import java.util.Iterator; @@ -41,7 +40,6 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E private boolean async; - private String esqlVersion; private String query; private boolean columnar; private boolean profile; @@ -78,19 +76,6 @@ public EsqlQueryRequest(StreamInput in) throws IOException { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - if (Strings.hasText(esqlVersion) == false) { - validationException = addValidationError(invalidVersion("is required"), validationException); - } else { - EsqlVersion version = EsqlVersion.parse(esqlVersion); - if (version == null) { - validationException = addValidationError(invalidVersion("has invalid value [" + esqlVersion + "]"), 
validationException); - } else if (version == EsqlVersion.SNAPSHOT && onSnapshotBuild == false) { - validationException = addValidationError( - invalidVersion("with value [" + esqlVersion + "] only allowed in snapshot builds"), - validationException - ); - } - } if (Strings.hasText(query) == false) { validationException = addValidationError("[" + RequestXContent.QUERY_FIELD + "] is required", validationException); } @@ -111,27 +96,8 @@ public ActionRequestValidationException validate() { return validationException; } - private static String invalidVersion(String reason) { - return "[" - + RequestXContent.ESQL_VERSION_FIELD - + "] " - + reason - + ", latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]"; - } - public EsqlQueryRequest() {} - public void esqlVersion(String esqlVersion) { - this.esqlVersion = esqlVersion; - } - - @Override - public String esqlVersion() { - return esqlVersion; - } - public void query(String query) { this.query = query; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java index 9eeffbb35c10e..7df5c95cbc953 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.Build; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.core.esql.action.internal.SharedSecrets; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; -import org.elasticsearch.xpack.esql.version.EsqlVersion; public class EsqlQueryRequestBuilder extends 
org.elasticsearch.xpack.core.esql.action.EsqlQueryRequestBuilder< EsqlQueryRequest, @@ -29,14 +27,6 @@ public static EsqlQueryRequestBuilder newSyncEsqlQueryRequestBuilder(Elasticsear private EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryRequest request) { super(client, EsqlQueryAction.INSTANCE, request); - EsqlVersion version = Build.current().isSnapshot() ? EsqlVersion.SNAPSHOT : EsqlVersion.latestReleased(); - esqlVersion(version.versionStringWithoutEmoji()); - } - - @Override - public EsqlQueryRequestBuilder esqlVersion(String esqlVersion) { - request.esqlVersion(esqlVersion); - return this; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java index 014d445f79564..9ffd48d9d0c3b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java @@ -74,7 +74,7 @@ static EsqlQueryRequest parseAsync(XContentParser parser) { } private static void objectParserCommon(ObjectParser parser) { - parser.declareString(EsqlQueryRequest::esqlVersion, ESQL_VERSION_FIELD); + parser.declareString((str, consumer) -> {}, ESQL_VERSION_FIELD); parser.declareString(EsqlQueryRequest::query, QUERY_FIELD); parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index ad47779fffbb6..bd2f8eb38f96f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -51,7 +51,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli esqlRequest = RequestXContent.parseAsync(parser); } - RestEsqlQueryAction.defaultVersionForOldClients(esqlRequest, request); LOGGER.debug("Beginning execution of ESQL async query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 268966422ce56..7f5adc310a535 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.esql.version.EsqlVersion; import java.io.IOException; import java.util.List; @@ -51,7 +50,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli esqlRequest = RequestXContent.parseSync(parser); } - defaultVersionForOldClients(esqlRequest, request); LOGGER.debug("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query()); return channel -> { @@ -68,41 +66,4 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli protected Set responseParams() { return Set.of(URL_PARAM_DELIMITER, EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION); } - - static final String PRODUCT_ORIGIN = "x-elastic-product-origin"; - static final String CLIENT_META = "x-elastic-client-meta"; - - /** - * Default the {@link EsqlQueryRequest#esqlVersion()} to the oldest version - * if we can detect that the request comes from an older version of the - * 
official client or an older version of kibana. These versions supported - * ESQL but ESQL was not GA, so, technically we can break - * them. But it's not hugely complicated to make them work smoothly on the - * upgrade that starts to require the {@code version} field. This does - * just that. - */ - static void defaultVersionForOldClients(EsqlQueryRequest esqlRequest, RestRequest restRequest) { - if (esqlRequest.esqlVersion() != null) { - return; - } - String clientMeta = restRequest.header(CLIENT_META); - if (clientMeta == null) { - return; - } - String product = restRequest.header(PRODUCT_ORIGIN); - if ("kibana".equals(product)) { - /* - * Kibana 8.11 to 8.13 used the 8.9 version of the javascript client. - * Kibana 8.14, the version we *want* to send the versions is on the - * 8.13 version of the javascript client. - */ - if (clientMeta.contains("es=8.9")) { - esqlRequest.esqlVersion(EsqlVersion.ROCKET.versionStringWithoutEmoji()); - } - return; - } - if (clientMeta.contains("es=8.13") || clientMeta.contains("es=8.12") || clientMeta.contains("es=8.11")) { - esqlRequest.esqlVersion(EsqlVersion.ROCKET.versionStringWithoutEmoji()); - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java index ffd27cffb3e60..929206ed58897 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java @@ -156,6 +156,7 @@ private IntFunction blockToJavaObject() { } case NULL -> offset -> null; case DOC -> throw new EsqlIllegalArgumentException("can't read values from [doc] block"); + case COMPOSITE -> throw new EsqlIllegalArgumentException("can't read values from [composite] block"); case UNKNOWN -> throw new EsqlIllegalArgumentException("can't read values from [" + block + "]"); }; } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 9c0fd82733a89..0df1ae078171d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.AggNameInput; -import org.elasticsearch.xpack.esql.core.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; @@ -52,12 +50,6 @@ protected TypeResolution resolveType() { return TypeResolutions.isExact(field, sourceText(), DEFAULT); } - @Override - protected Pipe makePipe() { - // unresolved AggNameInput (should always get replaced by the folder) - return new AggNameInput(source(), this, sourceText()); - } - @Override public int hashCode() { // NB: the hashcode is currently used for key generation so diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index 39ec47221c00c..97ba6feb6e278 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -117,6 +117,8 @@ static int estimateSize(DataType dataType) { case INT -> Integer.BYTES; case LONG -> Long.BYTES; case NULL -> 0; + 
// TODO: provide a specific estimate for aggregated_metrics_double + case COMPOSITE -> throw new EsqlIllegalArgumentException("can't estimate size for composite blocks"); case UNKNOWN -> throw new EsqlIllegalArgumentException("[unknown] can't be the result of field extraction"); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index c75b3d7f93b3a..70aa39a6319db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -146,16 +146,6 @@ static Query translate(InsensitiveEquals bc) { public static class BinaryComparisons extends ExpressionTranslator { @Override protected Query asQuery(BinaryComparison bc, TranslatorHandler handler) { - // TODO: Pretty sure this check is redundant with the one at the beginning of translate - ExpressionTranslators.BinaryComparisons.checkBinaryComparison(bc); - Query translated = translateOutOfRangeComparisons(bc); - if (translated != null) { - return handler.wrapFunctionQuery(bc, bc.left(), () -> translated); - } - return handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc, handler)); - } - - static Query translate(BinaryComparison bc, TranslatorHandler handler) { Check.isTrue( bc.right().foldable(), "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", @@ -164,6 +154,15 @@ static Query translate(BinaryComparison bc, TranslatorHandler handler) { Expressions.name(bc.right()), bc.symbol() ); + + Query translated = translateOutOfRangeComparisons(bc); + if (translated != null) { + return handler.wrapFunctionQuery(bc, bc.left(), () -> translated); + } + return handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc, handler)); + } + + static Query 
translate(BinaryComparison bc, TranslatorHandler handler) { TypedAttribute attribute = checkIsPushableAttribute(bc.left()); Source source = bc.source(); String name = handler.nameOf(attribute); @@ -353,8 +352,9 @@ public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { return handler.wrapFunctionQuery(f, cm.ipField(), () -> query); } } + // TODO we could optimize starts_with as well - return ExpressionTranslators.Scalars.doTranslate(f, handler); + throw new QlIllegalArgumentException("Cannot translate expression:[" + f.sourceText() + "]"); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java index 3dfaf98d8fbff..3c4ae41117273 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java @@ -9,13 +9,14 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; -import org.elasticsearch.xpack.esql.core.planner.QlTranslatorHandler; +import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.type.DataType; import 
org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -23,7 +24,7 @@ import java.util.function.Supplier; -public final class EsqlTranslatorHandler extends QlTranslatorHandler { +public final class EsqlTranslatorHandler implements TranslatorHandler { @Override public Query asQuery(Expression e) { @@ -56,4 +57,9 @@ public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier - * ESQL is a young language and we don't have the benefit of self-hosting - * its compiler. So we're going to make a lot of mistakes when designing it. - * As such, we expect it to change in backwards incompatible ways several - * times in 2024 and 2025. Hopefully we'll have learned our lesson and we'll - * settle down to one change every couple of years after that. - *

- *

- * For example, maybe we realize we've made a mistake with the {@link MvAvg} - * function and decide it should return the type of its input field rather - * than always returning a {@code double}. If we decide to make this change - * we'd have to bump the language version. We plan to batch changes like this - * into the {@link EsqlVersion#SNAPSHOT} version for a while and from time to - * time release them as a new version. - *

- *

- * We require a version to be sent on every request to the ESQL APIs so - * changing the version of a query is always opt-in. There is no REST request - * you can send to any ESQL endpoint that will default to a version of ESQL. - * That means we can release new versions of ESQL in a minor release of - * Elasticsearch. We can and we will. - *

- *

- * So users of Elasticsearch's clients don't need to think about the version - * of ESQL when they are getting started they we have a concept of "base version". - * This "base version" will remain constant for an entire major release of - * Elasticsearch and clients will send that version with ESQL requests unless - * otherwise configured. - *

- *

- * This is marked with {@link UpdateForV9} to remind us that we need to - * update the "base version" of ESQL in the client specification when - * we cut a new major. We'll need to do that on every major - and also bump the {@link UpdateForV9} annotation. - *

- */ -public enum EsqlVersion implements VersionId { - /** - * Breaking changes go here until the next version is released. - */ - SNAPSHOT(Integer.MAX_VALUE, 12, 99, "📷"), - ROCKET(2024, 4, "🚀"); - - static final Map VERSION_MAP_WITH_AND_WITHOUT_EMOJI = versionMapWithAndWithoutEmoji(); - private static final EsqlVersion[] RELEASED_ASCENDING = createReleasedAscending(); - - private static Map versionMapWithAndWithoutEmoji() { - Map stringToVersion = new LinkedHashMap<>(EsqlVersion.values().length * 2); - - for (EsqlVersion version : EsqlVersion.values()) { - putVersionCheckNoDups(stringToVersion, version.versionStringWithoutEmoji(), version); - putVersionCheckNoDups(stringToVersion, version.toString(), version); - } - - return stringToVersion; - } - - private static EsqlVersion[] createReleasedAscending() { - return Arrays.stream(EsqlVersion.values()) - .filter(v -> v != SNAPSHOT) - .sorted(Comparator.comparingInt(EsqlVersion::id)) - .toArray(EsqlVersion[]::new); - } - - private static void putVersionCheckNoDups(Map stringToVersion, String versionString, EsqlVersion version) { - EsqlVersion existingVersionForKey = stringToVersion.put(versionString, version); - if (existingVersionForKey != null) { - throw new IllegalArgumentException("Duplicate esql version with version string [" + versionString + "]"); - } - } - - /** - * Accepts a version string with the emoji suffix or without it. - * E.g. both "2024.04.01.🚀" and "2024.04.01" will be interpreted as {@link EsqlVersion#ROCKET}. - */ - public static EsqlVersion parse(String versionString) { - return VERSION_MAP_WITH_AND_WITHOUT_EMOJI.get(versionString); - } - - /** - * Return the released versions in ascending order. 
- */ - public static EsqlVersion[] releasedAscending() { - return RELEASED_ASCENDING; - } - - public static EsqlVersion latestReleased() { - return RELEASED_ASCENDING[RELEASED_ASCENDING.length - 1]; - } - - private int year; - private byte month; - private byte revision; - private String emoji; - - EsqlVersion(int year, int month, String emoji) { - this(year, month, 1, emoji); - } - - EsqlVersion(int year, int month, int revision, String emoji) { - if ((1 <= revision && revision <= 99) == false) { - throw new IllegalArgumentException("Version revision number must be between 1 and 99 but was [" + revision + "]"); - } - if ((1 <= month && month <= 12) == false) { - throw new IllegalArgumentException("Version month must be between 1 and 12 but was [" + month + "]"); - } - if ((emoji.codePointCount(0, emoji.length()) == 1) == false) { - throw new IllegalArgumentException("Version emoji must be a single unicode character but was [" + emoji + "]"); - } - this.year = year; - this.month = (byte) month; - this.revision = (byte) revision; - this.emoji = emoji; - } - - public int year() { - return year; - } - - public byte month() { - return month; - } - - public byte revision() { - return revision; - } - - public String emoji() { - return emoji; - } - - public String versionStringWithoutEmoji() { - return this == SNAPSHOT ? "snapshot" : Strings.format("%d.%02d.%02d", year, month, revision); - } - - @Override - public String toString() { - return versionStringWithoutEmoji() + "." + emoji; - } - - @Override - public int id() { - return this == SNAPSHOT ? 
Integer.MAX_VALUE : (10000 * year + 100 * month + revision); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index 6360d65a11165..317fabf0ac434 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.action; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Build; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.io.Streams; @@ -35,8 +34,6 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.type.DataTypes; import org.elasticsearch.xpack.esql.parser.TypedParamValue; -import org.elasticsearch.xpack.esql.version.EsqlVersion; -import org.elasticsearch.xpack.esql.version.EsqlVersionTests; import java.io.IOException; import java.util.ArrayList; @@ -58,23 +55,20 @@ public void testParseFields() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); - EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; StringBuilder paramsString = paramsString(params, hasParams); String json = String.format(Locale.ROOT, """ { - "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", "filter": %s - %s""", esqlVersion, query, columnar, locale.toLanguageTag(), filter, paramsString); + %s""", query, columnar, locale.toLanguageTag(), filter, paramsString); EsqlQueryRequest request = parseEsqlQueryRequestSync(json); - assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, 
request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -92,7 +86,6 @@ public void testParseFieldsForAsync() throws IOException { boolean columnar = randomBoolean(); Locale locale = randomLocale(random()); QueryBuilder filter = randomQueryBuilder(); - EsqlVersion esqlVersion = randomFrom(EsqlVersion.values()); List params = randomParameters(); boolean hasParams = params.isEmpty() == false; @@ -104,7 +97,6 @@ public void testParseFieldsForAsync() throws IOException { Locale.ROOT, """ { - "version": "%s", "query": "%s", "columnar": %s, "locale": "%s", @@ -113,7 +105,6 @@ public void testParseFieldsForAsync() throws IOException { "wait_for_completion_timeout": "%s", "keep_alive": "%s" %s""", - esqlVersion, query, columnar, locale.toLanguageTag(), @@ -126,7 +117,6 @@ public void testParseFieldsForAsync() throws IOException { EsqlQueryRequest request = parseEsqlQueryRequestAsync(json); - assertEquals(esqlVersion.toString(), request.esqlVersion()); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); @@ -170,86 +160,24 @@ public void testRejectUnknownFields() { }""", "unknown field [asdf]"); } - public void testKnownStableVersionIsValid() throws IOException { - for (EsqlVersion version : EsqlVersion.values()) { - if (version == EsqlVersion.SNAPSHOT) { - // Not stable, skip. Also avoids breaking the CI as this is invalid for non-SNAPSHOT builds. - continue; - } - - String validVersionString = randomBoolean() ? 
version.versionStringWithoutEmoji() : version.toString(); - - String json = String.format(Locale.ROOT, """ - { - "version": "%s", - "query": "ROW x = 1" - } - """, validVersionString); - - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - assertNull(request.validate()); - - request = parseEsqlQueryRequestAsync(json); - assertNull(request.validate()); - } - } - - public void testUnknownVersionIsNotValid() throws IOException { - String invalidVersionString = EsqlVersionTests.randomInvalidVersionString(); + public void testAnyVersionIsValid() throws IOException { + String validVersionString = randomAlphaOfLength(5); String json = String.format(Locale.ROOT, """ { "version": "%s", "query": "ROW x = 1" } - """, invalidVersionString); + """, validVersionString); EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - assertNotNull(request.validate()); - assertThat( - request.validate().getMessage(), - containsString( - "[version] has invalid value [" - + invalidVersionString - + "], latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]" - ) - ); - } - - public void testSnapshotVersionIsOnlyValidOnSnapshot() throws IOException { - String esqlVersion = randomBoolean() ? 
"snapshot" : "snapshot.📷"; - String json = String.format(Locale.ROOT, """ - { - "version": "%s", - "query": "ROW x = 1" - } - """, esqlVersion); - EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - - String errorOnNonSnapshotBuilds = "[version] with value [" - + esqlVersion - + "] only allowed in snapshot builds, latest available version is [" - + EsqlVersion.latestReleased().versionStringWithoutEmoji() - + "]"; - - if (Build.current().isSnapshot()) { - assertNull(request.validate()); - } else { - assertNotNull(request.validate()); - assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); - } - - request.onSnapshotBuild(true); assertNull(request.validate()); - request.onSnapshotBuild(false); - assertNotNull(request.validate()); - assertThat(request.validate().getMessage(), containsString(errorOnNonSnapshotBuilds)); + request = parseEsqlQueryRequestAsync(json); + assertNull(request.validate()); } - public void testMissingVersionIsNotValid() throws IOException { + public void testMissingVersionIsValid() throws IOException { String missingVersion = randomBoolean() ? 
"" : ", \"version\": \"\""; String json = String.format(Locale.ROOT, """ { @@ -259,13 +187,7 @@ public void testMissingVersionIsNotValid() throws IOException { }""", missingVersion); EsqlQueryRequest request = parseEsqlQueryRequest(json, randomBoolean()); - assertNotNull(request.validate()); - assertThat( - request.validate().getMessage(), - containsString( - "[version] is required, latest available version is [" + EsqlVersion.latestReleased().versionStringWithoutEmoji() + "]" - ) - ); + assertNull(request.validate()); } public void testMissingQueryIsNotValid() throws IOException { @@ -282,7 +204,6 @@ public void testMissingQueryIsNotValid() throws IOException { public void testPragmasOnlyValidOnSnapshot() throws IOException { String json = """ { - "version": "2024.04.01", "query": "ROW x = 1", "pragma": {"foo": "bar"} } @@ -300,7 +221,6 @@ public void testPragmasOnlyValidOnSnapshot() throws IOException { public void testTablesKeyword() throws IOException { String json = """ { - "version": "2024.04.01", "query": "ROW x = 1", "tables": {"a": {"c:keyword": ["a", "b", null, 1, 2.0, ["c", "d"], false]}} } @@ -332,7 +252,6 @@ public void testTablesKeyword() throws IOException { public void testTablesInteger() throws IOException { String json = """ { - "version": "2024.04.01", "query": "ROW x = 1", "tables": {"a": {"c:integer": [1, 2, "3", null, [5, 6]]}} } @@ -361,7 +280,6 @@ public void testTablesInteger() throws IOException { public void testTablesLong() throws IOException { String json = """ { - "version": "2024.04.01", "query": "ROW x = 1", "tables": {"a": {"c:long": [1, 2, "3", null, [5, 6]]}} } @@ -390,7 +308,6 @@ public void testTablesLong() throws IOException { public void testManyTables() throws IOException { String json = """ { - "version": "2024.04.01", "query": "ROW x = 1", "tables": { "t1": { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java deleted file mode 100644 index 6ee720e6a7334..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryActionTests.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.action; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.xpack.esql.version.EsqlVersion; -import org.hamcrest.Matcher; - -import java.util.List; -import java.util.function.Supplier; - -import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.CLIENT_META; -import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.PRODUCT_ORIGIN; -import static org.elasticsearch.xpack.esql.action.RestEsqlQueryAction.defaultVersionForOldClients; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class RestEsqlQueryActionTests extends ESTestCase { - public void testNoVersionForNoClient() { - assertEsqlVersion(null, null, nullValue(String.class)); - } - - public void testNoVersionForAlreadySet() { - EsqlQueryRequest esqlRequest = new EsqlQueryRequest(); - esqlRequest.esqlVersion("whatever"); - FakeRestRequest restRequest = new FakeRestRequest(); - Supplier version = randomFrom( - () -> "es=8.1" + between(0, 3), // Versions we would rewrite. - () -> "es=8.1" + between(4, 9), // We wouldn't rewrite these anyway, but let's try it sometimes. - () -> "es=8." + between(0, 9) + between(0, 9), // These will rarely spit out versions we would rewrite. Either is fine. - () -> "es=" + between(0, 9) + "." 
+ between(0, 9) + between(0, 9) - ); - restRequest.getHttpRequest().getHeaders().put(CLIENT_META, List.of("es=8.13.0")); - defaultVersionForOldClients(esqlRequest, restRequest); - assertThat(esqlRequest.esqlVersion(), equalTo("whatever")); - } - - public void testNoVersionForNewClient() { - Supplier version = randomFrom( - () -> "es=8.14", - () -> "es=8.2" + between(0, 9), - () -> "es=8." + between(3, 9) + between(0, 9), - () -> "es=9." + between(0, 9) + between(0, 9), - () -> "es=" + between(0, 9) + between(0, 9) + "." + between(0, 9) + between(0, 9) - ); - assertEsqlVersion(version.get(), randomProduct(), nullValue(String.class)); - } - - public void testAddsVersionForPython813() { - assertAddsOldest( - randomFrom( - "es=8.13.0,py=3.11.8,t=8.13.0,ur=2.2.1", // This is what the python client sent for me on 2024-4-12 - "py=3.11.8,es=8.13.0,ur=2.2.1,t=8.13.0", // This is just a jumbled version of the above - "es=8.13" // This is all we need to trigger it - ), - randomProduct() - ); - } - - public void testAddsVersionForPython812() { - assertAddsOldest( - randomFrom( - "es=8.12.0,py=3.11.8,t=8.13.0,ur=2.2.1", // This is what the python client sent for me on 2024-4-12 - "py=3.11.8,t=8.13.0,es=8.12.0,ur=2.2.1", // This is just a jumbled version of the above - "es=8.12" // This is all we need to trigger it - ), - randomProduct() - ); - } - - public void testNoVersionForKibana814() { - assertEsqlVersion("es=8.13", "kibana", nullValue(String.class)); - } - - public void testAddsVersionForKibana813() { - assertAddsOldest( - randomFrom( - "es=8.9.1p,js=20.12.2,t=8.3.3,hc=20.12.2", // This is what kibana sent on 2024-4-12 - "js=20.12.2,es=8.9.1p,t=8.3.3,hc=20.12.2", // This is just a jumbled version of the above - "es=8.9" // This is all we need to trigger it - ), - "kibana" - ); - } - - public void testAddsVersionForKibana812() { - assertAddsOldest( - randomFrom( - "es=8.9.1p,js=18.19.1,t=8.3.3,hc=18.19.1", // This is what kibana sent on 2024-4-12 - 
"js=18.19.1,t=8.3.3,es=8.9.1p,hc=18.19.1", // This is just a jumbled version of the above - "es=8.9" // This is all we need to trigger it - ), - "kibana" - ); - } - - public void testAddsVersionForKibana811() { - assertAddsOldest( - randomFrom( - "es=8.9.1p,js=18.18.2,t=8.3.3,hc=18.18.2", // This is what kibana sent on 2024-4-12 - "js=18.18.2,es=8.9.1p,t=8.3.3,hc=18.18.2", // This is just a jumbled version of the above - "es=8.9" // This is all we need to trigger it - ), - "kibana" - ); - } - - private void assertAddsOldest(String clientMeta, String elasticProductOrigin) { - assertEsqlVersion(clientMeta, elasticProductOrigin, equalTo(EsqlVersion.ROCKET.versionStringWithoutEmoji())); - } - - private void assertEsqlVersion(String clientMeta, String elasticProductOrigin, Matcher expectedEsqlVersion) { - EsqlQueryRequest esqlRequest = new EsqlQueryRequest(); - FakeRestRequest restRequest = new FakeRestRequest(); - if (clientMeta != null) { - restRequest.getHttpRequest().getHeaders().put(CLIENT_META, List.of(clientMeta)); - } - if (elasticProductOrigin != null) { - restRequest.getHttpRequest().getHeaders().put(PRODUCT_ORIGIN, List.of(elasticProductOrigin)); - } - defaultVersionForOldClients(esqlRequest, restRequest); - assertThat(esqlRequest.esqlVersion(), expectedEsqlVersion); - } - - /** - * Returns {@code null} or a random string that isn't {@code kibana}. - */ - private String randomProduct() { - return randomBoolean() ? 
null : randomAlphaOfLength(3); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/CanonicalTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java similarity index 88% rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/CanonicalTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java index 9d76efa78edd5..cbac81700a81a 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/CanonicalTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java @@ -5,30 +5,33 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression; +package org.elasticsearch.xpack.esql.expression; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.TestUtils; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Add; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Div; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mod; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mul; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Sub; -import 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThan; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.GreaterThanOrEqual; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DataTypes; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import java.time.ZoneId; import java.util.ArrayList; diff --git 
a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/NamedExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java similarity index 78% rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/NamedExpressionTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java index 0bbead373ffe2..536a24e6ce5c4 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/NamedExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NamedExpressionTests.java @@ -4,22 +4,22 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.function; +package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.TestUtils; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Add; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Div; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mod; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Mul; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Neg; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.Sub; import org.elasticsearch.xpack.esql.core.tree.Location; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataTypes; import 
org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java new file mode 100644 index 0000000000000..f7d562177746d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.TestUtils; +import org.elasticsearch.xpack.esql.core.expression.Literal; + +import java.util.Arrays; + +import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; +import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; + +public class InTests extends ESTestCase { + + private static final Literal ONE = L(1); + private static final Literal TWO = L(2); + private static final Literal THREE = L(3); + + public void testInWithContainedValue() { + In in = new In(EMPTY, TWO, Arrays.asList(ONE, TWO, THREE)); + assertTrue(in.fold()); + } + + public void testInWithNotContainedValue() { + In in = new In(EMPTY, THREE, Arrays.asList(ONE, TWO)); + assertFalse(in.fold()); + } + + public void testHandleNullOnLeftValue() { + In in = new In(EMPTY, NULL, Arrays.asList(ONE, TWO, THREE)); + assertNull(in.fold()); + in = new In(EMPTY, NULL, Arrays.asList(ONE, NULL, THREE)); + assertNull(in.fold()); + + } + + public void testHandleNullsOnRightValue() { + In in = new In(EMPTY, THREE, Arrays.asList(ONE, NULL, THREE)); + assertTrue(in.fold()); + in = new In(EMPTY, ONE, Arrays.asList(TWO, NULL, THREE)); + assertNull(in.fold()); + } + + private static Literal L(Object value) { + return TestUtils.of(EMPTY, value); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 97807d84843dc..4674e8afc07e9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -22,7 +22,6 @@ import 
org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; @@ -30,6 +29,7 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; @@ -394,13 +394,7 @@ public void testIsNotNullOnFunctionWithOneField() { EsRelation relation = relation(); var fieldA = getFieldAttribute("a"); var pattern = L("abc"); - Expression inn = isNotNull( - new And( - EMPTY, - new OptimizerRulesTests.TestStartsWith(EMPTY, fieldA, pattern, false), - greaterThanOf(new Add(EMPTY, ONE, TWO), THREE) - ) - ); + Expression inn = isNotNull(new And(EMPTY, new StartsWith(EMPTY, fieldA, pattern), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE))); Filter f = new Filter(EMPTY, relation, inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); @@ -412,8 +406,7 @@ public void testIsNotNullOnFunctionWithTwoFields() { EsRelation relation = relation(); var fieldA = getFieldAttribute("a"); var fieldB = getFieldAttribute("b"); - var pattern = L("abc"); - Expression inn = isNotNull(new OptimizerRulesTests.TestStartsWith(EMPTY, fieldA, fieldB, false)); + Expression inn = isNotNull(new StartsWith(EMPTY, fieldA, fieldB)); Filter f = new Filter(EMPTY, relation, 
inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java deleted file mode 100644 index cd4fd77a8dd22..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/version/EsqlVersionTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.version; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -public class EsqlVersionTests extends ESTestCase { - public void testLatestReleased() { - assertThat(EsqlVersion.latestReleased(), is(EsqlVersion.ROCKET)); - } - - public void testVersionString() { - assertThat(EsqlVersion.SNAPSHOT.toString(), equalTo("snapshot.📷")); - assertThat(EsqlVersion.ROCKET.toString(), equalTo("2024.04.01.🚀")); - } - - public void testVersionId() { - assertThat(EsqlVersion.SNAPSHOT.id(), equalTo(Integer.MAX_VALUE)); - assertThat(EsqlVersion.ROCKET.id(), equalTo(20240401)); - - for (EsqlVersion version : EsqlVersion.values()) { - assertTrue(EsqlVersion.SNAPSHOT.onOrAfter(version)); - if (version != EsqlVersion.SNAPSHOT) { - assertTrue(version.before(EsqlVersion.SNAPSHOT)); - } else { - assertTrue(version.onOrAfter(EsqlVersion.SNAPSHOT)); - } - } - - List versionsSortedAsc = Arrays.stream(EsqlVersion.values()) - 
.sorted(Comparator.comparing(EsqlVersion::year).thenComparing(EsqlVersion::month).thenComparing(EsqlVersion::revision)) - .toList(); - for (int i = 0; i < versionsSortedAsc.size() - 1; i++) { - assertTrue(versionsSortedAsc.get(i).before(versionsSortedAsc.get(i + 1))); - } - } - - public void testVersionStringNoEmoji() { - for (EsqlVersion version : EsqlVersion.values()) { - String[] versionSegments = version.toString().split("\\."); - String[] parsingPrefixSegments = Arrays.copyOf(versionSegments, versionSegments.length - 1); - - String expectedParsingPrefix = String.join(".", parsingPrefixSegments); - assertThat(version.versionStringWithoutEmoji(), equalTo(expectedParsingPrefix)); - } - } - - public void testParsing() { - for (EsqlVersion version : EsqlVersion.values()) { - String versionStringWithoutEmoji = version.versionStringWithoutEmoji(); - - assertThat(EsqlVersion.parse(versionStringWithoutEmoji), is(version)); - assertThat(EsqlVersion.parse(versionStringWithoutEmoji + "." + version.emoji()), is(version)); - } - - assertNull(EsqlVersion.parse(randomInvalidVersionString())); - } - - public static String randomInvalidVersionString() { - String[] invalidVersionString = new String[1]; - - do { - int length = randomIntBetween(1, 10); - invalidVersionString[0] = randomAlphaOfLength(length); - } while (EsqlVersion.VERSION_MAP_WITH_AND_WITHOUT_EMOJI.containsKey(invalidVersionString[0])); - - return invalidVersionString[0]; - } -} diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index 87180f0abfc71..0e3d0f1b2ec40 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ 
b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -851,13 +851,16 @@ public void testSearchableSnapshotsInHotPhasePinnedToHotNodes() throws Exception ) ); - indexDocument(client(), dataStream, true); - String firstGenIndex = DataStream.getDefaultBackingIndexName(dataStream, 1L); + // Create the data stream. + assertOK(client().performRequest(new Request("PUT", "_data_stream/" + dataStream))); + + var backingIndices = getBackingIndices(client(), dataStream); + String firstGenIndex = backingIndices.get(0); Map indexSettings = getIndexSettingsAsMap(firstGenIndex); assertThat(indexSettings.get(DataTier.TIER_PREFERENCE), is("data_hot")); // rollover the data stream so searchable_snapshot can complete - rolloverMaxOneDocCondition(client(), dataStream); + indexDocument(client(), dataStream, true); final String restoredIndex = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + firstGenIndex; assertBusy(() -> { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java index 2d775dfe13ffd..42d1955f0d453 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java @@ -228,7 +228,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources "Index Lifecycle Management is not running", createDetails(verbose, ilmMetadata, currentMode), AUTOMATION_DISABLED_IMPACT, - List.of(ILM_NOT_RUNNING) + verbose ? 
List.of(ILM_NOT_RUNNING) : List.of() ); } else { var stagnatingIndices = stagnatingIndicesFinder.find(); @@ -248,7 +248,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources + " stayed on the same action longer than expected.", createDetails(verbose, ilmMetadata, currentMode, stagnatingIndices), STAGNATING_INDEX_IMPACT, - createDiagnoses(stagnatingIndices, maxAffectedResourcesCount) + verbose ? createDiagnoses(stagnatingIndices, maxAffectedResourcesCount) : List.of() ); } } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java index d8f8014850c8f..9e2a67caac253 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.health.Diagnosis; +import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.ImpactArea; @@ -82,49 +83,6 @@ public void testIsGreenWhenRunningAndPoliciesConfiguredAndNoStagnatingIndices() verify(stagnatingIndicesFinder, times(1)).find(); } - public void testIsYellowIfThereIsOneStagnatingIndicesAndDetailsEmptyIfNoVerbose() throws IOException { - var clusterState = createClusterStateWith(new IndexLifecycleMetadata(createIlmPolicy(), RUNNING)); - var action = randomAction(); - var policyName = randomAlphaOfLength(10); - var indexName = randomAlphaOfLength(10); - var stagnatingIndicesFinder = mockedStagnatingIndicesFinder(List.of(indexMetadata(indexName, policyName, action))); - var service = 
createIlmHealthIndicatorService(clusterState, stagnatingIndicesFinder); - - var indicatorResult = service.calculate(false, HealthInfo.EMPTY_HEALTH_INFO); - - assertEquals(indicatorResult.name(), NAME); - assertEquals(indicatorResult.status(), YELLOW); - assertEquals(indicatorResult.symptom(), "An index has stayed on the same action longer than expected."); - assertEquals(xContentToMap(indicatorResult.details()), Map.of()); - assertThat(indicatorResult.impacts(), hasSize(1)); - assertThat( - indicatorResult.impacts().get(0), - equalTo( - new HealthIndicatorImpact( - NAME, - IlmHealthIndicatorService.STAGNATING_INDEX_IMPACT_ID, - 3, - "Automatic index lifecycle and data retention management cannot make progress on one or more indices. " - + "The performance and stability of the indices and/or the cluster could be impacted.", - List.of(ImpactArea.DEPLOYMENT_MANAGEMENT) - ) - ) - ); - assertThat(indicatorResult.diagnosisList(), hasSize(1)); - assertEquals(indicatorResult.diagnosisList().get(0).definition(), STAGNATING_ACTION_DEFINITIONS.get(action)); - - var affectedResources = indicatorResult.diagnosisList().get(0).affectedResources(); - assertThat(affectedResources, hasSize(2)); - assertEquals(affectedResources.get(0).getType(), Diagnosis.Resource.Type.ILM_POLICY); - assertThat(affectedResources.get(0).getValues(), hasSize(1)); - assertThat(affectedResources.get(0).getValues(), containsInAnyOrder(policyName)); - assertThat(affectedResources.get(1).getValues(), hasSize(1)); - assertEquals(affectedResources.get(1).getType(), Diagnosis.Resource.Type.INDEX); - assertThat(affectedResources.get(1).getValues(), containsInAnyOrder(indexName)); - - verify(stagnatingIndicesFinder, times(1)).find(); - } - public void testIsYellowIfThereIsOneStagnatingIndices() throws IOException { var clusterState = createClusterStateWith(new IndexLifecycleMetadata(createIlmPolicy(), RUNNING)); var action = randomAction(); @@ -279,6 +237,36 @@ public void testIsGreenWhenNoMetadata() { 
verifyNoInteractions(stagnatingIndicesFinder); } + public void testSkippingFieldsWhenVerboseIsFalse() { + var status = randomFrom(STOPPED, STOPPING); + var clusterState = createClusterStateWith(new IndexLifecycleMetadata(createIlmPolicy(), status)); + var stagnatingIndicesFinder = mockedStagnatingIndicesFinder(List.of()); + var service = createIlmHealthIndicatorService(clusterState, stagnatingIndicesFinder); + + assertThat( + service.calculate(false, HealthInfo.EMPTY_HEALTH_INFO), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Index Lifecycle Management is not running", + HealthIndicatorDetails.EMPTY, + Collections.singletonList( + new HealthIndicatorImpact( + NAME, + IlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, + 3, + "Automatic index lifecycle and data retention management is disabled. The performance and stability of the " + + "cluster could be impacted.", + List.of(ImpactArea.DEPLOYMENT_MANAGEMENT) + ) + ), + List.of() + ) + ) + ); + } + // We expose the indicator name and the diagnoses in the x-pack usage API. In order to index them properly in a telemetry index // they need to be declared in the health-api-indexer.edn in the telemetry repository. 
public void testMappedFieldsForTelemetry() { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 300c0d2c471dc..1602aa3af5e98 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -54,6 +54,7 @@ protected Collection> nodePlugins() { return Arrays.asList(Utils.TestInferencePlugin.class); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109005") public void testBulkOperations() throws Exception { Map shardsSettings = Collections.singletonMap(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)); indicesAdmin().prepareCreate(INDEX_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index d82979bfb71e7..aa3bfb6c224f5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -10,7 +10,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.MappedActionFilter; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -286,7 +286,7 @@ public Map 
getMappers() { } @Override - public Collection getActionFilters() { + public Collection getMappedActionFilters() { if (SemanticTextFeature.isEnabled()) { return singletonList(shardBulkInferenceActionFilter.get()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index 38d8b8d9b35c0..573e77a58991c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -81,12 +81,6 @@ public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceR this.batchSize = batchSize; } - @Override - public int order() { - // must execute last (after the security action filter) - return Integer.MAX_VALUE; - } - @Override public String actionName() { return TransportShardBulkAction.ACTION_NAME; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 0e52bdd5f8713..e6cfd565c2a17 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xpack.inference.common.Truncator; @@ -141,4 +142,15 @@ public List getInferenceServiceFactories() { ); } } + + public static 
Model getInvalidModel(String inferenceEntityId, String serviceName) { + var mockConfigs = mock(ModelConfigurations.class); + when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); + when(mockConfigs.getService()).thenReturn(serviceName); + + var mockModel = mock(Model.class); + when(mockModel.getConfigurations()).thenReturn(mockConfigs); + + return mockModel; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java deleted file mode 100644 index bfb019d2f8f59..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.services; - -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class Utils { - public static Model getInvalidModel(String inferenceEntityId, String serviceName) { - var mockConfigs = mock(ModelConfigurations.class); - when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); - when(mockConfigs.getService()).thenReturn(serviceName); - - var mockModel = mock(Model.class); - when(mockModel.getConfigurations()).thenReturn(mockConfigs); - - return mockModel; - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 51593c8d052d9..5869366ac2e22 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -62,6 +62,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -69,7 +70,6 @@ import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static 
org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.azureaistudio.AzureAiStudioConstants.API_KEY_FIELD; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index aafdf6c2bd57a..9fe8b472b22a5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -55,6 +55,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -63,7 +64,6 @@ import static org.elasticsearch.xpack.inference.results.ChunkedTextEmbeddingResultsTests.asMapWithListsInsteadOfArrays; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettingsTests.getAzureOpenAiSecretSettingsMap; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettingsTests.getPersistentAzureOpenAiServiceSettingsMap; import static 
org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettingsTests.getRequestAzureOpenAiServiceSettingsMap; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index ea31e79755fb6..f06fee4b0b9c4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -58,13 +58,13 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMapEmpty; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; @@ -1282,25 +1282,25 @@ private CohereService createCohereService() { return new CohereService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } - private PeristedConfig 
getPersistedConfigMap( + private PersistedConfig getPersistedConfigMap( Map serviceSettings, Map taskSettings, Map secretSettings ) { - return new PeristedConfig( + return new PersistedConfig( new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) ); } - private PeristedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { + private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - return new PeristedConfig( + return new PersistedConfig( new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), null ); } - private record PeristedConfig(Map config, Map secrets) {} + private record PersistedConfig(Map config, Map secrets) {} } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index 73c013af7b117..398b21312a03a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -29,9 +29,9 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.hamcrest.CoreMatchers.is; import static org.mockito.ArgumentMatchers.anyString; import static 
org.mockito.Mockito.mock; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 79d18d6ae2f27..cbac29c452772 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -55,6 +55,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -62,7 +63,6 @@ import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettingsTests.getServiceSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java 
b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index ad54c0c7746b9..81abe3dc5c088 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -29,6 +30,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -41,6 +43,7 @@ import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.index.mapper.TimeSeriesParams.MetricType; import org.elasticsearch.index.mapper.ValueFetcher; +import org.elasticsearch.index.mapper.XContentDataHelper; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.script.ScriptCompiler; @@ -587,6 +590,12 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio XContentParser.Token token; XContentSubParser subParser = null; EnumMap metricsParsed = new EnumMap<>(Metric.class); + // Preserves the content of the field in order to be able to construct synthetic 
source + // if field value is malformed. + XContentBuilder malformedContentForSyntheticSource = context.mappingLookup().isSourceSynthetic() && ignoreMalformed + ? XContentBuilder.builder(context.parser().contentType().xContent()) + : null; + try { token = context.parser().currentToken(); if (token == XContentParser.Token.VALUE_NULL) { @@ -596,6 +605,9 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio ensureExpectedToken(XContentParser.Token.START_OBJECT, token, context.parser()); subParser = new XContentSubParser(context.parser()); token = subParser.nextToken(); + if (malformedContentForSyntheticSource != null) { + malformedContentForSyntheticSource.startObject(); + } while (token != XContentParser.Token.END_OBJECT) { // should be an object sub-field with name a metric name ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, subParser); @@ -609,13 +621,20 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } token = subParser.nextToken(); + if (malformedContentForSyntheticSource != null) { + malformedContentForSyntheticSource.field(fieldName); + } // Make sure that the value is a number. 
Probably this will change when // new aggregate metric types are added (histogram, cardinality etc) ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, subParser); NumberFieldMapper delegateFieldMapper = metricFieldMappers.get(metric); // Delegate parsing the field to a numeric field mapper try { - metricsParsed.put(metric, delegateFieldMapper.value(context.parser())); + Number metricValue = delegateFieldMapper.value(context.parser()); + metricsParsed.put(metric, metricValue); + if (malformedContentForSyntheticSource != null) { + malformedContentForSyntheticSource.value(metricValue); + } } catch (IllegalArgumentException e) { throw new IllegalArgumentException("failed to parse [" + metric.name() + "] sub field: " + e.getMessage(), e); } @@ -658,10 +677,26 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } } catch (Exception e) { if (ignoreMalformed) { - if (subParser != null) { - // close the subParser so we advance to the end of the object + if (malformedContentForSyntheticSource != null) { + if (subParser != null) { + // Remaining data in parser needs to be stored as is in order to provide it in synthetic source. 
+ XContentHelper.drainAndClose(subParser, malformedContentForSyntheticSource); + } else { + // We don't use DrainingXContentParser since we don't want to go beyond current field + malformedContentForSyntheticSource.copyCurrentStructure(context.parser()); + } + ; + var nameValue = IgnoredSourceFieldMapper.NameValue.fromContext( + context, + name(), + XContentDataHelper.encodeXContentBuilder(malformedContentForSyntheticSource) + ); + context.addIgnoredField(nameValue); + } else if (subParser != null) { + // close the subParser, so we advance to the end of the object subParser.close(); } + context.addIgnoredField(name()); context.path().remove(); return; @@ -689,11 +724,7 @@ protected SyntheticSourceMode syntheticSourceMode() { @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - if (ignoreMalformed) { - throw new IllegalArgumentException( - "field [" + name() + "] of type [" + typeName() + "] doesn't support synthetic source because it ignores malformed numbers" - ); - } + // Note that malformed values are handled via `IgnoredSourceFieldMapper` infrastructure return new AggregateMetricSyntheticFieldLoader(name(), simpleName(), metrics); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java index 6646db4f2abf9..83e701486d93a 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/test/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapperTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; +import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin; @@ -33,11 +34,12 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.function.Supplier; +import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.IGNORE_MALFORMED; import static org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Names.METRICS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.matchesPattern; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.IsInstanceOf.instanceOf; @@ -467,8 +469,12 @@ public void testMetricType() throws IOException { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - assumeFalse("synthetic _source support for aggregate_double_metric doesn't support ignore_malformed", ignoreMalformed); - return new AggregateDoubleMetricSyntheticSourceSupport(); + return new AggregateDoubleMetricSyntheticSourceSupport(ignoreMalformed); + } + + @Override + public void testSyntheticSourceIgnoreMalformedExamples() { + assumeTrue("Scenarios are covered in scope of syntheticSourceSupport", false); } @Override @@ -478,16 +484,94 @@ protected IngestScriptSupport ingestScriptSupport() { protected final class AggregateDoubleMetricSyntheticSourceSupport implements SyntheticSourceSupport { - private final EnumSet storedMetrics = EnumSet.copyOf(randomNonEmptySubsetOf(Arrays.asList(Metric.values()))); + private final boolean malformedExample; + private final EnumSet storedMetrics; + + public AggregateDoubleMetricSyntheticSourceSupport(boolean malformedExample) { + this.malformedExample = malformedExample; + this.storedMetrics = 
EnumSet.copyOf(randomNonEmptySubsetOf(Arrays.asList(Metric.values()))); + } @Override public SyntheticSourceExample example(int maxVals) { // aggregate_metric_double field does not support arrays - Map value = randomAggregateMetric(); + Object value = randomAggregateMetric(); return new SyntheticSourceExample(value, value, this::mapping); } - private Map randomAggregateMetric() { + private Object randomAggregateMetric() { + if (malformedExample && randomBoolean()) { + return malformedValue(); + } + + return validMetrics(); + } + + private Object malformedValue() { + List> choices = List.of( + () -> randomAlphaOfLength(3), + ESTestCase::randomInt, + ESTestCase::randomLong, + ESTestCase::randomFloat, + ESTestCase::randomDouble, + ESTestCase::randomBoolean, + // no metrics + Map::of, + // unmapped metric + () -> { + var metrics = validMetrics(); + metrics.put("hello", "world"); + return metrics; + }, + // missing metric + () -> { + var metrics = validMetrics(); + metrics.remove(storedMetrics.stream().findFirst().get().name()); + return metrics; + }, + // invalid metric value + () -> { + var metrics = validMetrics(); + metrics.put(storedMetrics.stream().findFirst().get().name(), "boom"); + return metrics; + }, + // metric is an object + () -> { + var metrics = validMetrics(); + metrics.put(storedMetrics.stream().findFirst().get().name(), Map.of("hello", "world")); + return metrics; + }, + // invalid metric value with additional data + () -> { + var metrics = validMetrics(); + metrics.put(storedMetrics.stream().findFirst().get().name(), "boom"); + metrics.put("hello", "world"); + metrics.put("object", Map.of("hello", "world")); + metrics.put("list", List.of("hello", "world")); + return metrics; + }, + // negative value count + () -> { + var metrics = validMetrics(); + if (storedMetrics.contains(Metric.value_count.name())) { + metrics.put(Metric.value_count.name(), -100); + } + return metrics; + }, + // value count with decimal digits (whole numbers formatted as doubles 
are permitted, but non-whole numbers are not) + () -> { + var metrics = validMetrics(); + if (storedMetrics.contains(Metric.value_count.name())) { + metrics.put(Metric.value_count.name(), 10.5); + } + return metrics; + } + ); + + return randomFrom(choices).get(); + } + + private Map validMetrics() { Map value = new LinkedHashMap<>(storedMetrics.size()); for (Metric m : storedMetrics) { if (Metric.value_count == m) { @@ -506,19 +590,14 @@ private Map randomAggregateMetric() { private void mapping(XContentBuilder b) throws IOException { String[] metrics = storedMetrics.stream().map(Metric::toString).toArray(String[]::new); b.field("type", CONTENT_TYPE).array(METRICS_FIELD, metrics).field(DEFAULT_METRIC, metrics[0]); + if (malformedExample) { + b.field(IGNORE_MALFORMED, true); + } } @Override public List invalidExample() throws IOException { - return List.of( - new SyntheticSourceInvalidExample( - matchesPattern("field \\[field] of type \\[.+] doesn't support synthetic source because it ignores malformed numbers"), - b -> { - mapping(b); - b.field("ignore_malformed", true); - } - ) - ); + return List.of(); } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index f42dcc6179d04..706d7ea73aea9 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,5 +1,4 @@ -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.dra.DraResolvePlugin +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -14,42 +13,27 @@ esplugin { extendedPlugins = ['x-pack-autoscaling', 'lang-painless'] } -def localRepo = providers.systemProperty('build.ml_cpp.repo').orNull if (useDra == false) { repositories { exclusiveContent { - filter { - includeGroup 'org.elasticsearch.ml' - } forRepository { ivy { name "ml-cpp" + url 
providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() metadataSources { // no repository metadata, look directly for the artifact artifact() } - if (localRepo) { - url localRepo - patternLayout { - artifact "maven/[orgPath]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]" - } - } else { - url "https://artifacts-snapshot.elastic.co/" - patternLayout { - if (VersionProperties.isElasticsearchSnapshot()) { - artifact '/ml-cpp/[revision]/downloads/ml-cpp/[module]-[revision]-[classifier].[ext]' - } else { - // When building locally we always use snapshot artifacts even if passing `-Dbuild.snapshot=false`. - // Release builds are always done with a local repo. - artifact '/ml-cpp/[revision]-SNAPSHOT/downloads/ml-cpp/[module]-[revision]-SNAPSHOT-[classifier].[ext]' - } - } + patternLayout { + artifact "maven/org/elasticsearch/ml/ml-cpp/[revision]/[module]-[revision](-[classifier]).[ext]" } } } + filter { + includeGroup 'org.elasticsearch.ml' + } } } - } configurations { @@ -99,16 +83,21 @@ dependencies { api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" implementation 'org.ojalgo:ojalgo:51.2.0' - nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:deps@zip") { changing = true } - nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:nodeps@zip") { + nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:nodeps@zip") { changing = true } testImplementation 'org.ini4j:ini4j:0.5.2' testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" } +def mlCppVersion(){ + return (project.gradle.parent != null && BuildParams.isSnapshotBuild() == false) ? 
+ (project.version + "-SNAPSHOT") : project.version; +} + artifacts { // normal es plugins do not publish the jar but we need to since users need it for extensions archives tasks.named("jar") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index 637ad9d7bbbb2..f468e5239fd29 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -1047,15 +1047,22 @@ static Optional detectReasonIfMlJobsStopped(ClusterChangedEvent event) { if (event.changedCustomMetadataSet().contains(PersistentTasksCustomMetadata.TYPE) == false) { return Optional.empty(); } - final PersistentTasksCustomMetadata previousPersistentTasks = event.previousState() - .getMetadata() - .custom(PersistentTasksCustomMetadata.TYPE); - final PersistentTasksCustomMetadata currentPersistentTasks = event.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE); - Set previousMlTaskIds = findMlProcessTaskIds(previousPersistentTasks); + + PersistentTasksCustomMetadata previousPersistentTasks = PersistentTasksCustomMetadata.getPersistentTasksCustomMetadata( + event.previousState() + ); + if (previousPersistentTasks == null) { // no previous jobs so nothing has stopped + return Optional.empty(); + } + + PersistentTasksCustomMetadata currentPersistentTasks = PersistentTasksCustomMetadata.getPersistentTasksCustomMetadata( + event.state() + ); Set currentMlTaskIds = findMlProcessTaskIds(currentPersistentTasks); - Set stoppedTaskTypes = previousMlTaskIds.stream() - .filter(id -> currentMlTaskIds.contains(id) == false) // remove the tasks that are still present. Stopped Ids only. 
- .map(previousPersistentTasks::getTask) + + Set> previousMlTasks = MlTasks.findMlProcessTasks(previousPersistentTasks); + Set stoppedTaskTypes = previousMlTasks.stream() + .filter(task -> currentMlTaskIds.contains(task.getId()) == false) // remove the tasks that are still present. Stopped Ids only. .map(PersistentTasksCustomMetadata.PersistentTask::getTaskName) .map(MlTasks::prettyPrintTaskName) .collect(Collectors.toSet()); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java index c590e3d932a9f..c4a058013caf2 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityTestCase.java @@ -77,6 +77,7 @@ public abstract class AbstractRemoteClusterSecurityTestCase extends ESRestTestCa .configFile("remote-cluster-client.key", Resource.fromClasspath("ssl/remote-cluster-client.key")) .configFile("remote-cluster-client.crt", Resource.fromClasspath("ssl/remote-cluster-client.crt")) .configFile("remote-cluster-client-ca.crt", Resource.fromClasspath("ssl/remote-cluster-client-ca.crt")) + .module("reindex") // Needed for the role metadata migration .user(USER, PASS.toString()); protected static ElasticsearchCluster fulfillingCluster; diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 0299eca2db7dd..b7198a6a88984 100644 --- 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -44,8 +44,6 @@ import static org.hamcrest.Matchers.equalTo; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { - private static final String ESQL_VERSION = "2024.04.01"; - private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); private static final AtomicReference> REST_API_KEY_MAP_REF = new AtomicReference<>(); private static final AtomicBoolean SSL_ENABLED_REF = new AtomicBoolean(); @@ -767,7 +765,6 @@ protected Request esqlRequest(String command) throws IOException { body.endObject(); } } - body.field("version", ESQL_VERSION); body.endObject(); Request request = new Request("POST", "_query"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 2fc894c69aa4c..28f768d13dbf1 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -607,6 +607,7 @@ public class Constants { "internal:cluster/coordination_diagnostics/info", "internal:cluster/formation/info", "internal:gateway/local/started_shards", - "internal:admin/indices/prevalidate_shard_path" + "internal:admin/indices/prevalidate_shard_path", + "internal:index/metadata/migration_version/update" 
).filter(Objects::nonNull).collect(Collectors.toUnmodifiableSet()); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index 3fbcd00690e82..f83aeb117b7b8 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -252,6 +252,7 @@ public void testRequestCacheForFLS() { assertCacheState(FLS_INDEX, 2, 4); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109010") public void testRequestCacheForBothDLSandFLS() throws ExecutionException, InterruptedException { final Client powerClient = client(); final Client limitedClient = limitedClient(); @@ -315,6 +316,7 @@ public void testRequestCacheForBothDLSandFLS() throws ExecutionException, Interr assertCacheState(INDEX, 2, 5); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109011") public void testRequestCacheWithTemplateRoleQuery() { final Client client1 = client().filterWithHeader( Map.of("Authorization", basicAuthHeaderValue(DLS_TEMPLATE_ROLE_QUERY_USER_1, new SecureString(TEST_PASSWORD.toCharArray()))) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3712c66fe9af6..84fa92bb7d2d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -16,6 +16,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ElasticsearchStatusException; +import 
org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -44,6 +46,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.ssl.KeyStoreUtil; import org.elasticsearch.common.ssl.SslConfiguration; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -73,6 +76,8 @@ import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -80,6 +85,7 @@ import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.NetworkPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.SearchPlugin; @@ -121,6 +127,7 @@ import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; +import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyRequestTranslator; import 
org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; @@ -206,6 +213,7 @@ import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -391,6 +399,9 @@ import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry; import org.elasticsearch.xpack.security.support.ExtensionComponents; import org.elasticsearch.xpack.security.support.ReloadableSecurityComponent; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import org.elasticsearch.xpack.security.support.SecurityMigrationExecutor; +import org.elasticsearch.xpack.security.support.SecurityMigrations; import org.elasticsearch.xpack.security.support.SecuritySystemIndices; import org.elasticsearch.xpack.security.transport.SecurityHttpSettings; import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor; @@ -414,6 +425,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Predicate; @@ -444,10 +456,13 @@ public class Security extends Plugin ExtensiblePlugin, SearchPlugin, RestServerActionPlugin, - ReloadablePlugin { + ReloadablePlugin, + PersistentTaskPlugin { public static final String SECURITY_CRYPTO_THREAD_POOL_NAME = XPackField.SECURITY + "-crypto"; + private static final int MAX_SECURITY_MIGRATION_RETRY_COUNT = 10; + // TODO: ip filtering does not actually track license usage yet public static final LicensedFeature.Momentary 
IP_FILTERING_FEATURE = LicensedFeature.momentaryLenient( null, @@ -584,6 +599,8 @@ public class Security extends Plugin private final SetOnce grantApiKeyRequestTranslator = new SetOnce<>(); private final SetOnce getBuiltinPrivilegesResponseTranslator = new SetOnce<>(); private final SetOnce hasPrivilegesRequestBuilderFactory = new SetOnce<>(); + + private final SetOnce persistentTasksService = new SetOnce<>(); private final SetOnce fileRolesStore = new SetOnce<>(); private final SetOnce operatorPrivilegesService = new SetOnce<>(); private final SetOnce reservedRoleMappingAction = new SetOnce<>(); @@ -595,6 +612,13 @@ public class Security extends Plugin private final SetOnce fileRoleValidator = new SetOnce<>(); private final SetOnce secondaryAuthActions = new SetOnce<>(); + private final SetOnce securityMigrationExecutor = new SetOnce<>(); + + // Node local retry count for migration jobs that's checked only on the master node to make sure + // submit migration jobs doesn't get out of hand and retries forever if they fail. Reset by a + // restart or master node change. 
+ private final AtomicInteger nodeLocalMigrationRetryCount = new AtomicInteger(0); + public Security(Settings settings) { this(settings, Collections.emptyList()); } @@ -683,7 +707,8 @@ public Collection createComponents(PluginServices services) { services.environment(), services.nodeEnvironment().nodeMetadata(), services.indexNameExpressionResolver(), - services.telemetryProvider() + services.telemetryProvider(), + new PersistentTasksService(services.clusterService(), services.threadPool(), services.client()) ); } catch (final Exception e) { throw new IllegalStateException("security initialization failed", e); @@ -702,7 +727,8 @@ Collection createComponents( Environment environment, NodeMetadata nodeMetadata, IndexNameExpressionResolver expressionResolver, - TelemetryProvider telemetryProvider + TelemetryProvider telemetryProvider, + PersistentTasksService persistentTasksService ) throws Exception { logger.info("Security is {}", enabled ? "enabled" : "disabled"); if (enabled == false) { @@ -715,7 +741,24 @@ Collection createComponents( // See Plugin#additionalSettings() this.settings = environment.settings(); - systemIndices.init(client, clusterService); + systemIndices.init(client, featureService, clusterService); + + this.securityMigrationExecutor.set( + new SecurityMigrationExecutor( + SecurityMigrationTaskParams.TASK_NAME, + threadPool.executor(ThreadPool.Names.MANAGEMENT), + systemIndices.getMainIndexManager(), + client, + SecurityMigrations.MIGRATIONS_BY_VERSION + ) + ); + this.persistentTasksService.set(persistentTasksService); + + systemIndices.getMainIndexManager().addStateListener((oldState, newState) -> { + if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + applyPendingSecurityMigrations(newState); + } + }); scriptServiceReference.set(scriptService); // We need to construct the checks here while the secure settings are still available. 
@@ -839,7 +882,8 @@ Collection createComponents( client, getLicenseState(), systemIndices.getMainIndexManager(), - clusterService + clusterService, + featureService ); RoleDescriptor.setFieldPermissionsCache(fieldPermissionsCache); // Need to set to default if it wasn't set by an extension @@ -1118,6 +1162,44 @@ Collection createComponents( return components; } + private void applyPendingSecurityMigrations(SecurityIndexManager.State newState) { + Map.Entry nextMigration = SecurityMigrations.MIGRATIONS_BY_VERSION.higherEntry( + newState.migrationsVersion + ); + + if (nextMigration == null) { + return; + } + + // Check if next migration that has not been applied is eligible to run on the current cluster + if (systemIndices.getMainIndexManager().isEligibleSecurityMigration(nextMigration.getValue()) == false) { + // Reset retry counter if all eligible migrations have been applied successfully + nodeLocalMigrationRetryCount.set(0); + } else if (nodeLocalMigrationRetryCount.get() > MAX_SECURITY_MIGRATION_RETRY_COUNT) { + logger.warn("Security migration failed [" + nodeLocalMigrationRetryCount.get() + "] times, restart node to retry again."); + } else if (systemIndices.getMainIndexManager().isReadyForSecurityMigration(nextMigration.getValue())) { + nodeLocalMigrationRetryCount.incrementAndGet(); + persistentTasksService.get() + .sendStartRequest( + SecurityMigrationTaskParams.TASK_NAME, + SecurityMigrationTaskParams.TASK_NAME, + new SecurityMigrationTaskParams(newState.migrationsVersion), + null, + ActionListener.wrap((response) -> { + logger.debug("Security migration task submitted"); + }, (exception) -> { + // Do nothing if the task is already in progress + if (ExceptionsHelper.unwrapCause(exception) instanceof ResourceAlreadyExistsException) { + // Do not count ResourceAlreadyExistsException as failure + nodeLocalMigrationRetryCount.decrementAndGet(); + } else { + logger.warn("Submit security migration task failed: " + exception.getCause()); + } + }) + ); + } + } + 
private AuthorizationEngine getAuthorizationEngine() { return findValueFromExtensions("authorization engine", extension -> extension.getAuthorizationEngine(settings)); } @@ -1445,6 +1527,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(GetSecuritySettingsAction.INSTANCE, TransportGetSecuritySettingsAction.class), new ActionHandler<>(UpdateSecuritySettingsAction.INSTANCE, TransportUpdateSecuritySettingsAction.class), new ActionHandler<>(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, TransportReloadRemoteClusterCredentialsAction.class), + new ActionHandler<>(UpdateIndexMigrationVersionAction.INSTANCE, UpdateIndexMigrationVersionAction.TransportAction.class), usageAction, infoAction ).filter(Objects::nonNull).toList(); @@ -2181,6 +2264,17 @@ public CheckedBiConsumer getReque return new DlsFlsRequestCacheDifferentiator(getLicenseState(), securityContext, scriptServiceReference); } + @Override + public List> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { + return this.securityMigrationExecutor.get() != null ? 
List.of(this.securityMigrationExecutor.get()) : List.of(); + } + List> reservedClusterStateHandlers() { // If security is disabled we never call the plugin createComponents if (enabled == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java index d3c96107f3e15..c1fe553f41334 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityFeatures.java @@ -10,13 +10,24 @@ import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; -import org.elasticsearch.xpack.security.support.SecuritySystemIndices; import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.VERSION_SECURITY_PROFILE_ORIGIN; public class SecurityFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of(SECURITY_ROLES_METADATA_FLATTENED, SECURITY_MIGRATION_FRAMEWORK); + } + @Override public Map getHistoricalFeatures() { - return Map.of(SecuritySystemIndices.SECURITY_PROFILE_ORIGIN_FEATURE, SecuritySystemIndices.VERSION_SECURITY_PROFILE_ORIGIN); + return Map.of(SECURITY_PROFILE_ORIGIN_FEATURE, VERSION_SECURITY_PROFILE_ORIGIN); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index bb326f47082f3..5bd837c7d817c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; @@ -74,6 +75,7 @@ import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; /** * NativeRolesStore is a {@code RolesStore} that, instead of reading from a @@ -113,18 +115,22 @@ public class NativeRolesStore implements BiConsumer, ActionListener< private final ClusterService clusterService; + private final FeatureService featureService; + public NativeRolesStore( Settings settings, Client client, XPackLicenseState licenseState, SecurityIndexManager securityIndex, - ClusterService clusterService + ClusterService clusterService, + FeatureService featureService ) { this.settings = settings; this.client = client; this.licenseState = licenseState; this.securityIndex = securityIndex; this.clusterService = clusterService; + this.featureService = featureService; this.enabled = settings.getAsBoolean(NATIVE_ROLES_ENABLED, true); } @@ -299,7 +305,12 @@ void innerPutRole(final PutRoleRequest request, 
final RoleDescriptor role, final securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final XContentBuilder xContentBuilder; try { - xContentBuilder = role.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true); + xContentBuilder = role.toXContent( + jsonBuilder(), + ToXContent.EMPTY_PARAMS, + true, + featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED) + ); } catch (IOException e) { listener.onFailure(e); return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 95574c317495a..1ac22bfd21883 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -33,6 +33,8 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -40,19 +42,25 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.security.SecurityFeatures; import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; import static 
org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_DATA_KEY; +import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_KEY; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.State.UNRECOVERED_STATE; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MIGRATION_FRAMEWORK; /** * Manages the lifecycle, mapping and data upgrades/migrations of the {@code RestrictedIndicesNames#SECURITY_MAIN_ALIAS} @@ -79,18 +87,35 @@ public enum Availability { private volatile State state; private final boolean defensiveCopy; + private final FeatureService featureService; + + private final Set allSecurityFeatures = new SecurityFeatures().getFeatures(); public static SecurityIndexManager buildSecurityIndexManager( Client client, ClusterService clusterService, + FeatureService featureService, SystemIndexDescriptor descriptor ) { - final SecurityIndexManager securityIndexManager = new SecurityIndexManager(client, descriptor, State.UNRECOVERED_STATE, false); + final SecurityIndexManager securityIndexManager = new SecurityIndexManager( + featureService, + client, + descriptor, + State.UNRECOVERED_STATE, + false + ); clusterService.addListener(securityIndexManager); return securityIndexManager; } - private SecurityIndexManager(Client client, SystemIndexDescriptor descriptor, State state, boolean defensiveCopy) { + private SecurityIndexManager( + FeatureService featureService, + Client client, + SystemIndexDescriptor descriptor, + State state, + boolean defensiveCopy + ) { + this.featureService = featureService; this.client = client; this.state = state; this.systemIndexDescriptor = descriptor; @@ -102,7 +127,7 @@ private SecurityIndexManager(Client client, 
SystemIndexDescriptor descriptor, St * should be reused for multiple checks in the same workflow. */ public SecurityIndexManager defensiveCopy() { - return new SecurityIndexManager(null, systemIndexDescriptor, state, true); + return new SecurityIndexManager(null, null, systemIndexDescriptor, state, true); } public String aliasName() { @@ -229,6 +254,7 @@ public void clusterChanged(ClusterChangedEvent event) { } final State previousState = state; final IndexMetadata indexMetadata = resolveConcreteIndex(systemIndexDescriptor.getAliasName(), event.state().metadata()); + final Map customMetadata = indexMetadata == null ? null : indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY); final Instant creationTime = indexMetadata != null ? Instant.ofEpochMilli(indexMetadata.getCreationDate()) : null; final boolean isIndexUpToDate = indexMetadata == null || INDEX_FORMAT_SETTING.get(indexMetadata.getSettings()) == systemIndexDescriptor.getIndexFormat(); @@ -236,7 +262,9 @@ public void clusterChanged(ClusterChangedEvent event) { final boolean indexAvailableForWrite = available.v1(); final boolean indexAvailableForSearch = available.v2(); final boolean mappingIsUpToDate = indexMetadata == null || checkIndexMappingUpToDate(event.state()); - final SystemIndexDescriptor.MappingsVersion mappingVersion = getMinSecurityIndexMappingVersion(event.state()); + final int migrationsVersion = customMetadata == null ? 0 : Integer.parseInt(customMetadata.get(MIGRATION_VERSION_CUSTOM_DATA_KEY)); + final SystemIndexDescriptor.MappingsVersion minClusterMappingVersion = getMinSecurityIndexMappingVersion(event.state()); + final int indexMappingVersion = loadIndexMappingVersion(systemIndexDescriptor.getAliasName(), event.state()); final String concreteIndexName = indexMetadata == null ? 
systemIndexDescriptor.getPrimaryIndex() : indexMetadata.getIndex().getName(); @@ -262,11 +290,16 @@ public void clusterChanged(ClusterChangedEvent event) { indexAvailableForSearch, indexAvailableForWrite, mappingIsUpToDate, - mappingVersion, + migrationsVersion, + minClusterMappingVersion, + indexMappingVersion, concreteIndexName, indexHealth, indexState, - indexUUID + indexUUID, + allSecurityFeatures.stream() + .filter(feature -> featureService.clusterHasFeature(event.state(), feature)) + .collect(Collectors.toSet()) ); this.state = newState; @@ -321,6 +354,20 @@ private Tuple checkIndexAvailable(ClusterState state) { return new Tuple<>(allPrimaryShards, searchShards); } + public boolean isEligibleSecurityMigration(SecurityMigrations.SecurityMigration securityMigration) { + return state.securityFeatures.containsAll(securityMigration.nodeFeaturesRequired()) + && state.indexMappingVersion >= securityMigration.minMappingVersion(); + } + + public boolean isReadyForSecurityMigration(SecurityMigrations.SecurityMigration securityMigration) { + return state.indexAvailableForWrite + && state.indexAvailableForSearch + && state.isIndexUpToDate + && state.indexExists() + && state.securityFeatures.contains(SECURITY_MIGRATION_FRAMEWORK) + && isEligibleSecurityMigration(securityMigration); + } + /** * Detect if the mapping in the security index is outdated. If it's outdated it means that whatever is in cluster state is more recent. * There could be several nodes on different ES versions (mixed cluster) supporting different mapping versions, so only return false if @@ -398,6 +445,10 @@ public void checkIndexVersionThenExecute(final Consumer consumer, fin } } + public String getConcreteIndexName() { + return state.concreteIndexName; + } + /** * Prepares the index by creating it if it doesn't exist, then executes the runnable. 
* @param consumer a handler for any exceptions that are raised either during preparation or execution @@ -421,7 +472,9 @@ public void prepareIndexIfNeededThenExecute(final Consumer consumer, ); } else if (state.indexExists() == false) { assert state.concreteIndexName != null; - final SystemIndexDescriptor descriptorForVersion = systemIndexDescriptor.getDescriptorCompatibleWith(state.mappingVersion); + final SystemIndexDescriptor descriptorForVersion = systemIndexDescriptor.getDescriptorCompatibleWith( + state.minClusterMappingVersion + ); if (descriptorForVersion == null) { final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage("create index"); @@ -470,7 +523,9 @@ public void onFailure(Exception e) { ); } } else if (state.mappingUpToDate == false) { - final SystemIndexDescriptor descriptorForVersion = systemIndexDescriptor.getDescriptorCompatibleWith(state.mappingVersion); + final SystemIndexDescriptor descriptorForVersion = systemIndexDescriptor.getDescriptorCompatibleWith( + state.minClusterMappingVersion + ); if (descriptorForVersion == null) { final String error = systemIndexDescriptor.getMinimumMappingsVersionMessage("updating mapping"); @@ -527,17 +582,36 @@ public static boolean isIndexDeleted(State previousState, State currentState) { * State of the security index. 
*/ public static class State { - public static final State UNRECOVERED_STATE = new State(null, false, false, false, false, null, null, null, null, null); + public static final State UNRECOVERED_STATE = new State( + null, + false, + false, + false, + false, + null, + null, + null, + null, + null, + null, + null, + Set.of() + ); public final Instant creationTime; public final boolean isIndexUpToDate; public final boolean indexAvailableForSearch; public final boolean indexAvailableForWrite; public final boolean mappingUpToDate; - public final SystemIndexDescriptor.MappingsVersion mappingVersion; + public final Integer migrationsVersion; + // Min mapping version supported by the descriptors in the cluster + public final SystemIndexDescriptor.MappingsVersion minClusterMappingVersion; + // Applied mapping version + public final Integer indexMappingVersion; public final String concreteIndexName; public final ClusterHealthStatus indexHealth; public final IndexMetadata.State indexState; public final String indexUUID; + public final Set securityFeatures; public State( Instant creationTime, @@ -545,22 +619,28 @@ public State( boolean indexAvailableForSearch, boolean indexAvailableForWrite, boolean mappingUpToDate, - SystemIndexDescriptor.MappingsVersion mappingVersion, + Integer migrationsVersion, + SystemIndexDescriptor.MappingsVersion minClusterMappingVersion, + Integer indexMappingVersion, String concreteIndexName, ClusterHealthStatus indexHealth, IndexMetadata.State indexState, - String indexUUID + String indexUUID, + Set securityFeatures ) { this.creationTime = creationTime; this.isIndexUpToDate = isIndexUpToDate; this.indexAvailableForSearch = indexAvailableForSearch; this.indexAvailableForWrite = indexAvailableForWrite; this.mappingUpToDate = mappingUpToDate; - this.mappingVersion = mappingVersion; + this.migrationsVersion = migrationsVersion; + this.minClusterMappingVersion = minClusterMappingVersion; + this.indexMappingVersion = indexMappingVersion; 
this.concreteIndexName = concreteIndexName; this.indexHealth = indexHealth; this.indexState = indexState; this.indexUUID = indexUUID; + this.securityFeatures = securityFeatures; } @Override @@ -573,10 +653,13 @@ public boolean equals(Object o) { && indexAvailableForSearch == state.indexAvailableForSearch && indexAvailableForWrite == state.indexAvailableForWrite && mappingUpToDate == state.mappingUpToDate - && Objects.equals(mappingVersion, state.mappingVersion) + && Objects.equals(indexMappingVersion, state.indexMappingVersion) + && Objects.equals(migrationsVersion, state.migrationsVersion) + && Objects.equals(minClusterMappingVersion, state.minClusterMappingVersion) && Objects.equals(concreteIndexName, state.concreteIndexName) && indexHealth == state.indexHealth - && indexState == state.indexState; + && indexState == state.indexState + && Objects.equals(securityFeatures, state.securityFeatures); } public boolean indexExists() { @@ -591,9 +674,12 @@ public int hashCode() { indexAvailableForSearch, indexAvailableForWrite, mappingUpToDate, - mappingVersion, + migrationsVersion, + minClusterMappingVersion, + indexMappingVersion, concreteIndexName, - indexHealth + indexHealth, + securityFeatures ); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java new file mode 100644 index 0000000000000..bd5d0fb5a8ef5 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction; +import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; + +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.Executor; + +public class SecurityMigrationExecutor extends PersistentTasksExecutor { + + private static final Logger logger = LogManager.getLogger(SecurityMigrationExecutor.class); + private final SecurityIndexManager securityIndexManager; + private final Client client; + private final TreeMap migrationByVersion; + + public SecurityMigrationExecutor( + String taskName, + Executor executor, + SecurityIndexManager securityIndexManager, + Client client, + TreeMap migrationByVersion + ) { + super(taskName, executor); + this.securityIndexManager = securityIndexManager; + this.client = client; + this.migrationByVersion = migrationByVersion; + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, SecurityMigrationTaskParams params, PersistentTaskState state) { + applyOutstandingMigrations(task, params.getMigrationVersion(), ActionListener.wrap((res) -> task.markAsCompleted(), (exception) -> { + logger.warn("Security migration failed: " + exception); + task.markAsFailed(exception); + })); + } + + private void applyOutstandingMigrations(AllocatedPersistentTask task, int currentMigrationVersion, ActionListener listener) { + 
if (task.isCancelled()) { + listener.onFailure(new TaskCancelledException("Security migration task cancelled")); + return; + } + Map.Entry migrationEntry = migrationByVersion.higherEntry(currentMigrationVersion); + + // Check if all nodes can support feature and that the cluster is on a compatible mapping version + if (migrationEntry != null && securityIndexManager.isReadyForSecurityMigration(migrationEntry.getValue())) { + migrationEntry.getValue() + .migrate( + securityIndexManager, + client, + ActionListener.wrap( + response -> updateMigrationVersion( + migrationEntry.getKey(), + securityIndexManager.getConcreteIndexName(), + new ThreadedActionListener<>( + this.getExecutor(), + ActionListener.wrap( + updateResponse -> applyOutstandingMigrations(task, migrationEntry.getKey(), listener), + listener::onFailure + ) + ) + ), + listener::onFailure + ) + ); + } else { + logger.info("Security migrations applied until version: [" + currentMigrationVersion + "]"); + listener.onResponse(null); + } + } + + private void updateMigrationVersion(int migrationVersion, String indexName, ActionListener listener) { + client.execute( + UpdateIndexMigrationVersionAction.INSTANCE, + new UpdateIndexMigrationVersionAction.Request(TimeValue.MAX_VALUE, migrationVersion, indexName), + ActionListener.wrap((response) -> { + listener.onResponse(null); + }, listener::onFailure) + ); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java new file mode 100644 index 0000000000000..8ef132ad0ed34 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.reindex.UpdateByQueryAction; +import org.elasticsearch.index.reindex.UpdateByQueryRequest; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS; + +public class SecurityMigrations { + + public interface SecurityMigration { + /** + * Method that will execute the actual migration - needs to be idempotent and non-blocking + * + * @param indexManager for the security index + * @param client the index client + * @param listener listener to provide updates back to caller + */ + void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener); + + /** + * Any node features that are required for this migration to run. This makes sure that all nodes in the cluster can handle any + * changes in behaviour introduced by the migration. + * + * @return a set of features needed to be supported or an empty set if no change in behaviour is expected + */ + Set nodeFeaturesRequired(); + + /** + * The min mapping version required to support this migration. 
This makes sure that the index has at least the min mapping that is + * required to support the migration. + * + * @return the minimum mapping version required to apply this migration + */ + int minMappingVersion(); + } + + public static final TreeMap MIGRATIONS_BY_VERSION = new TreeMap<>(Map.of(1, new SecurityMigration() { + private static final Logger logger = LogManager.getLogger(SecurityMigration.class); + + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + BoolQueryBuilder filterQuery = new BoolQueryBuilder().filter(QueryBuilders.termQuery("type", "role")) + .mustNot(QueryBuilders.existsQuery("metadata_flattened")); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(filterQuery).size(0).trackTotalHits(true); + SearchRequest countRequest = new SearchRequest(indexManager.getConcreteIndexName()); + countRequest.source(searchSourceBuilder); + + client.search(countRequest, ActionListener.wrap(response -> { + // If there are no roles, skip migration + if (response.getHits().getTotalHits().value > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + updateRolesByQuery(indexManager, client, filterQuery, listener); + } else { + listener.onResponse(null); + } + }, listener::onFailure)); + } + + private void updateRolesByQuery( + SecurityIndexManager indexManager, + Client client, + BoolQueryBuilder filterQuery, + ActionListener listener + ) { + UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(indexManager.getConcreteIndexName()); + updateByQueryRequest.setQuery(filterQuery); + updateByQueryRequest.setScript( + new Script(ScriptType.INLINE, "painless", "ctx._source.metadata_flattened = ctx._source.metadata", Collections.emptyMap()) + ); + client.admin() + .cluster() + .execute(UpdateByQueryAction.INSTANCE, updateByQueryRequest, ActionListener.wrap(bulkByScrollResponse -> { + logger.info("Migrated [" + 
bulkByScrollResponse.getTotal() + "] roles"); + listener.onResponse(null); + }, listener::onFailure)); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED); + } + + @Override + public int minMappingVersion() { + return ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS.id(); + } + })); +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 9e20cb05a3cdc..75937bf3e2c18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.VersionId; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.ExecutorNames; @@ -56,6 +57,8 @@ public class SecuritySystemIndices { public static final String SECURITY_PROFILE_ALIAS = ".security-profile"; public static final Version VERSION_SECURITY_PROFILE_ORIGIN = Version.V_8_3_0; public static final NodeFeature SECURITY_PROFILE_ORIGIN_FEATURE = new NodeFeature("security.security_profile_origin"); + public static final NodeFeature SECURITY_MIGRATION_FRAMEWORK = new NodeFeature("security.migration_framework"); + public static final NodeFeature SECURITY_ROLES_METADATA_FLATTENED = new NodeFeature("security.roles_metadata_flattened"); /** * Security managed index mappings used to be updated based on the product version. 
They are now updated based on per-index mappings @@ -90,13 +93,18 @@ public Collection getSystemIndexDescriptors() { return List.of(mainDescriptor, tokenDescriptor, profileDescriptor); } - public void init(Client client, ClusterService clusterService) { + public void init(Client client, FeatureService featureService, ClusterService clusterService) { if (this.initialized.compareAndSet(false, true) == false) { throw new IllegalStateException("Already initialized"); } - this.mainIndexManager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, mainDescriptor); - this.tokenIndexManager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, tokenDescriptor); - this.profileIndexManager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, profileDescriptor); + this.mainIndexManager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, featureService, mainDescriptor); + this.tokenIndexManager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, featureService, tokenDescriptor); + this.profileIndexManager = SecurityIndexManager.buildSecurityIndexManager( + client, + clusterService, + featureService, + profileDescriptor + ); } public SecurityIndexManager getMainIndexManager() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 04c5415f51815..69e8d7b8b681e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.license.TestUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.plugins.ExtensiblePlugin; 
import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; @@ -230,7 +231,8 @@ private Collection createComponentsUtil(Settings settings) throws Except env, nodeMetadata, TestIndexNameExpressionResolver.newInstance(threadContext), - TelemetryProvider.NOOP + TelemetryProvider.NOOP, + mock(PersistentTasksService.class) ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 42de8d014edf2..963ff62bc137d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -2512,10 +2512,13 @@ private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { true, true, null, + null, + null, concreteSecurityIndexName, indexStatus, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java index e127f70ac83a8..37a4cd4f783e4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.time.Instant; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static org.mockito.Mockito.mock; @@ -39,10 +40,13 @@ private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { true, true, null, + null, + null, concreteSecurityIndexName, 
indexStatus, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index b47610797a832..c860ceeafc0f4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -412,10 +412,13 @@ private SecurityIndexManager.State indexState(boolean isUpToDate, ClusterHealthS true, true, null, + null, + null, concreteSecurityIndexName, healthStatus, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index c137c7b00b678..9d73f7e688f7d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -1617,10 +1617,13 @@ public SecurityIndexManager.State dummyIndexState(boolean isIndexUpToDate, Clust true, true, null, + null, + null, concreteSecurityIndexName, healthStatus, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 2d02117b9728f..6a2ac7721c9a1 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -793,10 +793,13 @@ private SecurityIndexManager.State dummyState( true, true, null, + null, + null, concreteSecurityIndexName, healthStatus, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 9d83d5f5c60ed..e14a25088f749 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.query.QueryBuilders; @@ -326,14 +327,22 @@ public void testTransformingRoleWithRestrictionFails() throws IOException { public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { final Client client = mock(Client.class); final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(TransportVersion.current()); + final FeatureService featureService = mock(FeatureService.class); final XPackLicenseState licenseState = mock(XPackLicenseState.class); final AtomicBoolean methodCalled = new AtomicBoolean(false); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); - systemIndices.init(client, clusterService); + 
systemIndices.init(client, featureService, clusterService); final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, client, licenseState, securityIndex, clusterService) { + final NativeRolesStore rolesStore = new NativeRolesStore( + Settings.EMPTY, + client, + licenseState, + securityIndex, + clusterService, + mock(FeatureService.class) + ) { @Override void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { if (methodCalled.compareAndSet(false, true)) { @@ -443,10 +452,18 @@ enum TEST_MODE { final AtomicBoolean methodCalled = new AtomicBoolean(false); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); - systemIndices.init(client, clusterService); + final FeatureService featureService = mock(FeatureService.class); + systemIndices.init(client, featureService, clusterService); final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - final NativeRolesStore rolesStore = new NativeRolesStore(Settings.EMPTY, client, licenseState, securityIndex, clusterService) { + final NativeRolesStore rolesStore = new NativeRolesStore( + Settings.EMPTY, + client, + licenseState, + securityIndex, + clusterService, + mock(FeatureService.class) + ) { @Override void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { if (methodCalled.compareAndSet(false, true)) { @@ -502,10 +519,18 @@ public void testGetRoleWhenDisabled() throws Exception { final ClusterService clusterService = mock(ClusterService.class); final XPackLicenseState licenseState = mock(XPackLicenseState.class); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(settings); - systemIndices.init(client, clusterService); + final FeatureService featureService = mock(FeatureService.class); + systemIndices.init(client, featureService, 
clusterService); final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - final NativeRolesStore store = new NativeRolesStore(settings, client, licenseState, securityIndex, clusterService); + final NativeRolesStore store = new NativeRolesStore( + settings, + client, + licenseState, + securityIndex, + clusterService, + mock(FeatureService.class) + ); final PlainActionFuture future = new PlainActionFuture<>(); store.getRoleDescriptors(Set.of(randomAlphaOfLengthBetween(4, 12)), future); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java index 6b60336276c35..698809beb6d30 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java @@ -61,11 +61,14 @@ public void testSecurityIndexStateChangeWillInvalidateAllRegisteredInvalidators( true, true, true, + null, new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0), + null, ".security", ClusterHealthStatus.GREEN, IndexMetadata.State.OPEN, - "my_uuid" + "my_uuid", + Set.of() ); cacheInvalidatorRegistry.onSecurityIndexStateChange(previousState, currentState); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index a7c5c616cf5bf..493483a5e4a1b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -39,6 +39,8 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; @@ -50,6 +52,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; +import org.elasticsearch.xpack.security.SecurityFeatures; import org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.hamcrest.Matchers; @@ -58,10 +61,13 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.HashSet; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; +import java.util.stream.Collectors; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -108,6 +114,7 @@ protected void } }; + final FeatureService featureService = new FeatureService(List.of(new SecurityFeatures())); final ClusterService clusterService = mock(ClusterService.class); when(clusterService.getSettings()).thenReturn(Settings.EMPTY); final SystemIndexDescriptor descriptor = new SecuritySystemIndices(clusterService.getSettings()).getSystemIndexDescriptors() @@ -116,7 +123,7 @@ protected void .findFirst() .get(); descriptorSpy = spy(descriptor); - manager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, descriptorSpy); + manager = SecurityIndexManager.buildSecurityIndexManager(client, clusterService, featureService, 
descriptorSpy); } public void testIndexWithUpToDateMappingAndTemplate() { @@ -563,6 +570,90 @@ public void testIndexOutOfDateListeners() { assertTrue(manager.isIndexUpToDate()); } + public void testReadyForMigration() { + final ClusterState.Builder clusterStateBuilder = createClusterState( + TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, + SecuritySystemIndices.SECURITY_MAIN_ALIAS, + IndexMetadata.State.OPEN + ); + clusterStateBuilder.nodeFeatures( + Map.of("1", new SecurityFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet())) + ); + manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); + assertTrue(manager.isReadyForSecurityMigration(new SecurityMigrations.SecurityMigration() { + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + listener.onResponse(null); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(); + } + + @Override + public int minMappingVersion() { + return 0; + } + })); + } + + public void testNotReadyForMigrationBecauseOfFeature() { + final ClusterState.Builder clusterStateBuilder = createClusterState( + TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, + SecuritySystemIndices.SECURITY_MAIN_ALIAS, + IndexMetadata.State.OPEN + ); + clusterStateBuilder.nodeFeatures( + Map.of("1", new SecurityFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet())) + ); + manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); + assertFalse(manager.isReadyForSecurityMigration(new SecurityMigrations.SecurityMigration() { + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + listener.onResponse(null); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(new NodeFeature("not a real feature")); + } + + @Override + public int minMappingVersion() { + return 0; + } + })); + } + + public void 
testNotReadyForMigrationBecauseOfMappingVersion() { + final ClusterState.Builder clusterStateBuilder = createClusterState( + TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7, + SecuritySystemIndices.SECURITY_MAIN_ALIAS, + IndexMetadata.State.OPEN + ); + clusterStateBuilder.nodeFeatures( + Map.of("1", new SecurityFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet())) + ); + manager.clusterChanged(event(markShardsAvailable(clusterStateBuilder))); + assertFalse(manager.isReadyForSecurityMigration(new SecurityMigrations.SecurityMigration() { + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + listener.onResponse(null); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(); + } + + @Override + public int minMappingVersion() { + return 1000; + } + })); + } + public void testProcessClosedIndexState() { // Index initially exists final ClusterState.Builder indexAvailable = createClusterState( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java new file mode 100644 index 0000000000000..3c3b322c28a2f --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionResponse; +import org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams; +import org.junit.Before; + +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class SecurityMigrationExecutorTests extends ESTestCase { + private ThreadPool threadPool; + private Client client; + private SecurityIndexManager securityIndexManager; + + private int updateIndexMigrationVersionActionInvocations; + + private boolean clientShouldThrowException = false; + + @Before + public void setUpMocks() { + threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + when(threadPool.generic()).thenReturn(EsExecutors.DIRECT_EXECUTOR_SERVICE); + updateIndexMigrationVersionActionInvocations = 0; + client = new NoOpClient(threadPool) { + @Override + @SuppressWarnings("unchecked") + protected void doExecute( + ActionType 
action, + Request request, + ActionListener listener + ) { + if (clientShouldThrowException) { + listener.onFailure(new IllegalStateException("Bad client")); + return; + } + updateIndexMigrationVersionActionInvocations++; + listener.onResponse((Response) new UpdateIndexMigrationVersionResponse()); + + } + }; + securityIndexManager = mock(SecurityIndexManager.class); + } + + public void testSuccessfulMigration() { + final int[] migrateInvocations = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) + ); + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + verify(mockTask, times(1)).markAsCompleted(); + verify(mockTask, times(0)).markAsFailed(any()); + assertEquals(2, updateIndexMigrationVersionActionInvocations); + assertEquals(2, migrateInvocations[0]); + } + + public void testNoMigrationMeetsRequirements() { + final int[] migrateInvocationsCounter = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>( + Map.of( + 1, + generateMigration(migrateInvocationsCounter, false), + 2, + generateMigration(migrateInvocationsCounter, false), + 3, + generateMigration(migrateInvocationsCounter, false) + ) + ) + ); + + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + verify(mockTask, times(1)).markAsCompleted(); + verify(mockTask, times(0)).markAsFailed(any()); + assertEquals(0, 
updateIndexMigrationVersionActionInvocations); + assertEquals(0, migrateInvocationsCounter[0]); + } + + public void testPartialMigration() { + final int[] migrateInvocations = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>( + Map.of( + 1, + generateMigration(migrateInvocations, true), + 2, + generateMigration(migrateInvocations, true), + 3, + generateMigration(migrateInvocations, false), + 4, + generateMigration(migrateInvocations, false), + 5, + generateMigration(migrateInvocations, true) + ) + ) + ); + + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + verify(mockTask, times(1)).markAsCompleted(); + verify(mockTask, times(0)).markAsFailed(any()); + assertEquals(2, updateIndexMigrationVersionActionInvocations); + assertEquals(2, migrateInvocations[0]); + } + + public void testNoMigrationNeeded() { + final int[] migrateInvocations = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) + ); + + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + SecurityMigrationTaskParams taskParams = mock(SecurityMigrationTaskParams.class); + when(taskParams.getMigrationVersion()).thenReturn(7); + + securityMigrationExecutor.nodeOperation(mockTask, taskParams, mock(PersistentTaskState.class)); + verify(mockTask, times(1)).markAsCompleted(); + verify(mockTask, times(0)).markAsFailed(any()); + assertEquals(0, updateIndexMigrationVersionActionInvocations); + assertEquals(0, migrateInvocations[0]); + } + + public void 
testMigrationThrowsRuntimeException() { + when(securityIndexManager.isReadyForSecurityMigration(any())).thenReturn(true); + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>(Map.of(1, new SecurityMigrations.SecurityMigration() { + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + throw new IllegalStateException("Oh no, this is a terrible state"); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(); + } + + @Override + public int minMappingVersion() { + return 0; + } + })) + ); + + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + + assertThrows( + IllegalStateException.class, + () -> securityMigrationExecutor.nodeOperation( + mockTask, + mock(SecurityMigrationTaskParams.class), + mock(PersistentTaskState.class) + ) + ); + } + + public void testUpdateMigrationVersionThrowsException() { + final int[] migrateInvocations = new int[1]; + SecurityMigrationExecutor securityMigrationExecutor = new SecurityMigrationExecutor( + "test-task", + threadPool.generic(), + securityIndexManager, + client, + new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) + ); + clientShouldThrowException = true; + AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + verify(mockTask, times(1)).markAsFailed(any()); + verify(mockTask, times(0)).markAsCompleted(); + } + + private SecurityMigrations.SecurityMigration generateMigration(int[] migrateInvocationsCounter, boolean isEligible) { + SecurityMigrations.SecurityMigration migration = new SecurityMigrations.SecurityMigration() { + @Override + public void migrate(SecurityIndexManager indexManager, Client 
client, ActionListener listener) { + migrateInvocationsCounter[0]++; + listener.onResponse(null); + } + + @Override + public Set nodeFeaturesRequired() { + return Set.of(); + } + + @Override + public int minMappingVersion() { + return 0; + } + }; + when(securityIndexManager.isReadyForSecurityMigration(migration)).thenReturn(isEligible); + return migration; + } +} diff --git a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java index 0b504569073bb..58a51aa3efdd8 100644 --- a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java +++ b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -110,6 +111,12 @@ public void testPutShutdownIsIdempotentForRemove() throws Exception { checkPutShutdownIdempotency("REMOVE"); } + private static void maybeAddMasterNodeTimeout(Request request) { + if (randomBoolean()) { + request.addParameter(RestUtils.REST_MASTER_TIMEOUT_PARAM, TEST_REQUEST_TIMEOUT.getStringRep()); + } + } + @SuppressWarnings("unchecked") private void checkPutShutdownIdempotency(String type) throws Exception { String nodeIdToShutdown = getRandomNodeId(); @@ -122,12 +129,14 @@ private void checkPutShutdownIdempotency(String type) throws Exception { // Put a shutdown request Request putShutdown = new Request("PUT", "_nodes/" + nodeIdToShutdown + "/shutdown"); + maybeAddMasterNodeTimeout(putShutdown); 
putShutdown.setJsonEntity("{\"type\": \"" + type + "\", \"reason\": \"" + newReason + "\"}"); assertOK(client().performRequest(putShutdown)); // Ensure we can read it back and it has the new reason { Request getShutdownStatus = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown"); + maybeAddMasterNodeTimeout(getShutdownStatus); Map statusResponse = responseAsMap(client().performRequest(getShutdownStatus)); List> nodesArray = (List>) statusResponse.get("nodes"); assertThat(nodesArray, hasSize(1)); @@ -410,6 +419,7 @@ private void putNodeShutdown( // Put a shutdown request Request putShutdown = new Request("PUT", "_nodes/" + nodeIdToShutdown + "/shutdown"); + maybeAddMasterNodeTimeout(putShutdown); try (XContentBuilder putBody = JsonXContent.contentBuilder()) { putBody.startObject(); diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/DesiredBalanceShutdownIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/DesiredBalanceShutdownIT.java index c0c69842030b9..ca87157696d3f 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/DesiredBalanceShutdownIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/DesiredBalanceShutdownIT.java @@ -83,8 +83,10 @@ public Settings onNodeStopped(String newNodeName) { logger.info("--> waiting for replacement to complete"); assertBusy(() -> { - final var getShutdownResponse = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request()) - .actionGet(10, TimeUnit.SECONDS); + final var getShutdownResponse = client().execute( + GetShutdownStatusAction.INSTANCE, + new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT) + ).actionGet(10, TimeUnit.SECONDS); assertTrue( Strings.toString(getShutdownResponse, true, true), getShutdownResponse.getShutdownStatuses() diff --git 
a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownPluginsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownPluginsIT.java index 26f0de92b4405..264403a6d2c18 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownPluginsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownPluginsIT.java @@ -73,13 +73,16 @@ public void testShutdownAwarePlugin() throws Exception { GetShutdownStatusAction.Response getResp = client().execute( GetShutdownStatusAction.INSTANCE, - new GetShutdownStatusAction.Request(remainNode) + new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, remainNode) ).get(); assertTrue(getResp.getShutdownStatuses().isEmpty()); // The plugin should be in progress - getResp = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(shutdownNode)).get(); + getResp = client().execute( + GetShutdownStatusAction.INSTANCE, + new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, shutdownNode) + ).get(); assertThat( getResp.getShutdownStatuses().get(0).pluginsStatus().getStatus(), equalTo(SingleNodeShutdownMetadata.Status.IN_PROGRESS) @@ -89,7 +92,10 @@ public void testShutdownAwarePlugin() throws Exception { safe.set(true); // The plugin should be complete - getResp = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(shutdownNode)).get(); + getResp = client().execute( + GetShutdownStatusAction.INSTANCE, + new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, shutdownNode) + ).get(); assertThat(getResp.getShutdownStatuses().get(0).pluginsStatus().getStatus(), equalTo(SingleNodeShutdownMetadata.Status.COMPLETE)); // The shutdown node should be in the triggered list diff --git 
a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java index 89897706f1862..465388b147a73 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java @@ -105,7 +105,8 @@ private void deleteNodeShutdown(String nodeId) { } private void assertNoShuttingDownNodes(String nodeId) throws ExecutionException, InterruptedException { - var response = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(nodeId)).get(); + var response = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, nodeId)) + .get(); assertThat(response.getShutdownStatuses(), empty()); } diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index 6c860a73ded82..0c70fd5a252c4 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -473,7 +473,8 @@ private void putNodeShutdown(String nodeId, SingleNodeShutdownMetadata.Type type } private void assertNodeShutdownStatus(String nodeId, SingleNodeShutdownMetadata.Status status) throws Exception { - var response = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(nodeId)).get(); + var response = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, nodeId)) + 
.get(); assertThat(response.getShutdownStatuses().get(0).migrationStatus().getStatus(), equalTo(status)); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index 7266f8ff71129..d88d3c35bf3ac 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.shutdown; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -17,6 +18,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -42,17 +45,50 @@ public static class Request extends MasterNodeRequest { private final String[] nodeIds; - public Request(String... nodeIds) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + @Deprecated(forRemoval = true) // temporary compatibility shim + public Request() { + super(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + nodeIds = Strings.EMPTY_ARRAY; + } + + @Deprecated(forRemoval = true) // temporary compatibility shim + public Request(String nodeId) { + super(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + nodeIds = new String[] { nodeId }; + } + + public Request(TimeValue masterNodeTimeout, String... 
nodeIds) { + super(masterNodeTimeout); this.nodeIds = nodeIds; } + @UpdateForV9 // only needed for bwc, inline in v9 public static Request readFrom(StreamInput in) throws IOException { - return new Request(in.readStringArray()); + if (in.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT)) { + return new Request(in); + } else { + return new Request(TimeValue.THIRTY_SECONDS, in); + } + } + + private Request(StreamInput in) throws IOException { + super(in); + assert in.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT); + nodeIds = in.readStringArray(); + } + + @UpdateForV9 // only needed for bwc, remove in v9 + private Request(TimeValue masterNodeTimeout, StreamInput in) throws IOException { + super(masterNodeTimeout); + assert in.getTransportVersion().before(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT); + nodeIds = in.readStringArray(); } @Override public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT)) { + super.writeTo(out); + } out.writeStringArray(this.nodeIds); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java index babe2174b0952..66bcee05c4dff 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/RestGetShutdownStatusAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; @@ -36,10 +37,13 @@ public List routes() { 
@Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - String[] nodeIds = Strings.commaDelimitedListToStringArray(request.param("nodeId")); + final var actionRequest = new GetShutdownStatusAction.Request( + RestUtils.getMasterNodeTimeout(request), + Strings.commaDelimitedListToStringArray(request.param("nodeId")) + ); return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute( GetShutdownStatusAction.INSTANCE, - new GetShutdownStatusAction.Request(nodeIds), + actionRequest, new RestRefCountedChunkedToXContentListener<>(channel) ); } diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java index 769d93a94ae69..166bec9ec5f62 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java @@ -25,6 +25,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetShutdownStatusAction.Request createTestInstance() { return new GetShutdownStatusAction.Request( + TEST_REQUEST_TIMEOUT, randomList(0, 20, () -> randomAlphaOfLengthBetween(15, 25)).toArray(Strings.EMPTY_ARRAY) ); } @@ -35,6 +36,6 @@ protected GetShutdownStatusAction.Request mutateInstance(GetShutdownStatusAction String[] newNodeIds = randomList(1, 20, () -> randomValueOtherThanMany(oldIds::contains, () -> randomAlphaOfLengthBetween(15, 25))) .toArray(Strings.EMPTY_ARRAY); - return new GetShutdownStatusAction.Request(newNodeIds); + return new GetShutdownStatusAction.Request(TEST_REQUEST_TIMEOUT, newNodeIds); } } diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java 
b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java index a4f73e0e3bdac..6076214833704 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorService.java @@ -127,7 +127,7 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources "Snapshot Lifecycle Management is not running", createDetails(verbose, Collections.emptyList(), slmMetadata, currentMode), impacts, - List.of(SLM_NOT_RUNNING) + verbose ? List.of(SLM_NOT_RUNNING) : List.of() ); } else { List unhealthyPolicies = slmMetadata.getSnapshotConfigurations() @@ -178,20 +178,22 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources "Encountered [" + unhealthyPolicies.size() + "] unhealthy snapshot lifecycle management policies.", createDetails(verbose, unhealthyPolicies, slmMetadata, currentMode), impacts, - List.of( - new Diagnosis( - checkRecentlyFailedSnapshots(cause, action), - List.of( - new Diagnosis.Resource( - Diagnosis.Resource.Type.SLM_POLICY, - unhealthyPolicies.stream() - .map(SnapshotLifecyclePolicyMetadata::getId) - .limit(Math.min(unhealthyPolicies.size(), maxAffectedResourcesCount)) - .toList() + verbose + ? 
List.of( + new Diagnosis( + checkRecentlyFailedSnapshots(cause, action), + List.of( + new Diagnosis.Resource( + Diagnosis.Resource.Type.SLM_POLICY, + unhealthyPolicies.stream() + .map(SnapshotLifecyclePolicyMetadata::getId) + .limit(Math.min(unhealthyPolicies.size(), maxAffectedResourcesCount)) + .toList() + ) ) ) ) - ) + : List.of() ); } @@ -228,30 +230,29 @@ private static HealthIndicatorDetails createDetails( SnapshotLifecycleMetadata metadata, OperationMode mode ) { - if (verbose) { - Map details = new LinkedHashMap<>(); - details.put("slm_status", mode); - details.put("policies", metadata.getSnapshotConfigurations().size()); - if (unhealthyPolicies.size() > 0) { - details.put( - "unhealthy_policies", - Map.of( - "count", - unhealthyPolicies.size(), - "invocations_since_last_success", - unhealthyPolicies.stream() - .collect( - Collectors.toMap( - SnapshotLifecyclePolicyMetadata::getId, - SnapshotLifecyclePolicyMetadata::getInvocationsSinceLastSuccess - ) - ) - ) - ); - } - return new SimpleHealthIndicatorDetails(details); - } else { + if (verbose == false) { return HealthIndicatorDetails.EMPTY; } + Map details = new LinkedHashMap<>(); + details.put("slm_status", mode); + details.put("policies", metadata.getSnapshotConfigurations().size()); + if (unhealthyPolicies.size() > 0) { + details.put( + "unhealthy_policies", + Map.of( + "count", + unhealthyPolicies.size(), + "invocations_since_last_success", + unhealthyPolicies.stream() + .collect( + Collectors.toMap( + SnapshotLifecyclePolicyMetadata::getId, + SnapshotLifecyclePolicyMetadata::getInvocationsSinceLastSuccess + ) + ) + ) + ); + } + return new SimpleHealthIndicatorDetails(details); } } diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorServiceTests.java index 2d5a6a800ffa2..9b0d20308cf76 100644 --- 
a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SlmHealthIndicatorServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.Diagnosis.Resource.Type; +import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.ImpactArea; @@ -355,6 +356,34 @@ public void testSnapshotPolicyExceedsWarningThresholdPredicate() { assertThat(SlmHealthIndicatorService.snapshotFailuresExceedWarningCount(1L, slmPolicyMetadata), is(false)); } + public void testSkippingFieldsWhenVerboseIsFalse() { + var status = randomFrom(STOPPED, STOPPING); + var clusterState = createClusterStateWith(new SnapshotLifecycleMetadata(createSlmPolicy(), status, null)); + var service = createSlmHealthIndicatorService(clusterState); + + assertThat( + service.calculate(false, HealthInfo.EMPTY_HEALTH_INFO), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Snapshot Lifecycle Management is not running", + HealthIndicatorDetails.EMPTY, + Collections.singletonList( + new HealthIndicatorImpact( + NAME, + SlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, + 3, + "Scheduled snapshots are not running. New backup snapshots will not be created automatically.", + List.of(ImpactArea.BACKUP) + ) + ), + List.of() + ) + ) + ); + } + // We expose the indicator name and the diagnoses in the x-pack usage API. In order to index them properly in a telemetry index // they need to be declared in the health-api-indexer.edn in the telemetry repository. 
public void testMappedFieldsForTelemetry() { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index a8f437f476ada..44cfa99c003d7 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -482,6 +482,11 @@ protected void checkIncomingMergeType(FieldMapper mergeWith) { super.checkIncomingMergeType(mergeWith); } + @Override + protected SyntheticSourceMode syntheticSourceMode() { + return SyntheticSourceMode.FALLBACK; + } + public static class GeoShapeDocValuesField extends AbstractScriptFieldFactory implements Field, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoFieldMapperTests.java index d15c124112d1d..64f9f2df4c707 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoFieldMapperTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperTestCase; +import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; @@ -16,6 +17,7 @@ import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** Base class for testing geo field mappers */ @@ -25,7 +27,15 @@ public abstract class 
GeoFieldMapperTests extends MapperTestCase { @Override protected Collection getPlugins() { - return Collections.singletonList(new LocalStateSpatialPlugin()); + var plugin = new LocalStateSpatialPlugin(); + plugin.loadExtensions(new ExtensiblePlugin.ExtensionLoader() { + @Override + public List loadExtensions(Class extensionPointType) { + return List.of(); + } + }); + + return Collections.singletonList(plugin); } @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 0ddb38ea500f1..507fe80291d89 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -7,8 +7,16 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.GeoJson; +import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper; @@ -22,13 +30,17 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -422,7 +434,161 @@ protected Object generateRandomInputValue(MappedFieldType ft) { @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - throw new AssumptionViolatedException("not supported"); + // Almost like GeoShapeType but no circles + enum ShapeType { + POINT, + LINESTRING, + POLYGON, + MULTIPOINT, + MULTILINESTRING, + MULTIPOLYGON, + GEOMETRYCOLLECTION, + ENVELOPE + } + + return new SyntheticSourceSupport() { + @Override + public boolean preservesExactSource() { + return true; + } + + @Override + public SyntheticSourceExample example(int maxValues) throws IOException { + if (randomBoolean()) { + Value v = generateValue(); + if (v.blockLoaderOutput != null) { + return new SyntheticSourceExample(v.input, v.output, v.blockLoaderOutput, this::mapping); + } + return new SyntheticSourceExample(v.input, v.output, this::mapping); + } + + List values = randomList(1, maxValues, this::generateValue); + List in = values.stream().map(Value::input).toList(); + List out = values.stream().map(Value::output).toList(); + + // Block loader infrastructure will never return nulls + List outBlockList = values.stream() + .filter(v -> v.input != null) + .map(v -> v.blockLoaderOutput != null ? v.blockLoaderOutput : v.output) + .toList(); + var outBlock = outBlockList.size() == 1 ? 
outBlockList.get(0) : outBlockList; + + return new SyntheticSourceExample(in, out, outBlock, this::mapping); + } + + private record Value(Object input, Object output, String blockLoaderOutput) { + Value(Object input, Object output) { + this(input, output, null); + } + } + + private Value generateValue() { + if (ignoreMalformed && randomBoolean()) { + List> choices = List.of( + () -> randomAlphaOfLength(3), + ESTestCase::randomInt, + ESTestCase::randomLong, + ESTestCase::randomFloat, + ESTestCase::randomDouble + ); + Object v = randomFrom(choices).get(); + return new Value(v, v); + } + if (randomBoolean()) { + return new Value(null, null); + } + + var type = randomFrom(ShapeType.values()); + var isGeoJson = randomBoolean(); + + switch (type) { + case POINT -> { + var point = GeometryTestUtils.randomPoint(false); + return value(point, isGeoJson); + } + case LINESTRING -> { + var line = GeometryTestUtils.randomLine(false); + return value(line, isGeoJson); + } + case POLYGON -> { + var polygon = GeometryTestUtils.randomPolygon(false); + return value(polygon, isGeoJson); + } + case MULTIPOINT -> { + var multiPoint = GeometryTestUtils.randomMultiPoint(false); + return value(multiPoint, isGeoJson); + } + case MULTILINESTRING -> { + var multiPoint = GeometryTestUtils.randomMultiLine(false); + return value(multiPoint, isGeoJson); + } + case MULTIPOLYGON -> { + var multiPolygon = GeometryTestUtils.randomMultiPolygon(false); + return value(multiPolygon, isGeoJson); + } + case GEOMETRYCOLLECTION -> { + var multiPolygon = GeometryTestUtils.randomGeometryCollectionWithoutCircle(false); + return value(multiPolygon, isGeoJson); + } + case ENVELOPE -> { + var rectangle = GeometryTestUtils.randomRectangle(); + var wktString = WellKnownText.toWKT(rectangle); + + return new Value(wktString, wktString); + } + default -> throw new UnsupportedOperationException("Unsupported shape"); + } + } + + private static Value value(Geometry geometry, boolean isGeoJson) { + var wktString = 
WellKnownText.toWKT(geometry); + var normalizedWktString = GeometryNormalizer.needsNormalize(Orientation.RIGHT, geometry) + ? WellKnownText.toWKT(GeometryNormalizer.apply(Orientation.RIGHT, geometry)) + : wktString; + + if (isGeoJson) { + var map = GeoJson.toMap(geometry); + return new Value(map, map, normalizedWktString); + } + + return new Value(wktString, wktString, normalizedWktString); + } + + private void mapping(XContentBuilder b) throws IOException { + b.field("type", "geo_shape"); + if (rarely()) { + b.field("index", false); + } + if (rarely()) { + b.field("doc_values", false); + } + if (ignoreMalformed) { + b.field("ignore_malformed", true); + } + } + + @Override + public List invalidExample() throws IOException { + return List.of(); + } + }; + } + + @Override + protected Function loadBlockExpected(BlockReaderSupport blockReaderSupport, boolean columnReader) { + return v -> asWKT((BytesRef) v); + } + + protected static Object asWKT(BytesRef value) { + // Internally we use WKB in BytesRef, but for test assertions we want to use WKT for readability + Geometry geometry = WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, value.bytes); + return WellKnownText.toWKT(geometry); + } + + @Override + protected BlockReaderSupport getSupportedReaders(MapperService mapper, String loaderFieldName) { + // Synthetic source is currently not supported. 
+ return new BlockReaderSupport(false, false, mapper, loaderFieldName); } @Override diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml index 45ec59a419e13..b846dbe858f61 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/100_synthetic_source.yml @@ -1,4 +1,4 @@ -constant_keyword: +aggregate_metric_double: - requires: cluster_features: ["gte_v8.5.0"] reason: synthetic source support added in 8.5.0 @@ -51,3 +51,54 @@ constant_keyword: min: 18.2 max: 100.0 value_count: 50 + +--- +aggregate_metric_double with ignore_malformed: + - requires: + cluster_features: ["mapper.track_ignored_source"] + reason: requires tracking ignored source + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + metric: + type: aggregate_metric_double + metrics: [min, max, value_count] + default_metric: max + ignore_malformed: true + + - do: + index: + index: test + id: "1" + refresh: true + body: + metric: + min: 18.2 + max: 100 + field: "field" + sub: + array: [1, 2, 3] + field: "field" + value_count: 50 + + - do: + search: + index: test + + - match: + hits.hits.0._source: + metric: + min: 18.2 + max: 100 + field: "field" + sub: + array: [1, 2, 3] + field: "field" + value_count: 50 + diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index b703335940056..b91343d03d3d4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -22,7 +22,6 @@ esql.query: body: query: 'FROM test | 
sort emp_no | eval ip = to_ip(coalesce(ip1.keyword, "255.255.255.255")) | keep emp_no, ip' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -42,8 +41,6 @@ esql.query: body: query: 'FROM test | sort emp_no | eval x1 = concat(ip1, ip2), x2 = coalesce(x1, "255.255.255.255"), x3 = to_ip(x2) | keep emp_no, x*' - version: 2024.04.01 - - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } - match: { columns.1.name: "x1" } @@ -111,7 +108,6 @@ esql.query: body: query: 'from index* metadata _index | limit 5 | sort _index desc' - version: 2024.04.01 - match: { columns.0.name: http.headers } - match: { columns.0.type: unsupported } - match: { columns.1.name: http.headers.location } @@ -174,7 +170,6 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field1) | limit 10' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -184,7 +179,6 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field2) | limit 10' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -194,7 +188,6 @@ esql.query: body: query: 'from npe_single_value* | stats x = avg(field3) | limit 10' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: double } - length: { values: 1 } @@ -238,7 +231,6 @@ esql.query: body: query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: keyword } - match: { columns.1.name: y } @@ -252,7 +244,6 @@ esql.query: body: query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' - version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: long } - match: { 
columns.1.name: x } @@ -296,7 +287,6 @@ esql.query: body: query: 'from idx_with_multivalues | eval b = mv_dedupe(boolean), k = mv_dedupe(keyword), i = mv_dedupe(integer), l = mv_dedupe(long), d = mv_dedupe(double) | keep b, k, i, l, d | limit 1' - version: 2024.04.01 - match: { columns.0.name: b } - match: { columns.0.type: boolean } - match: { columns.1.name: k } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml index da87251c35966..52d390e7b288b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml @@ -118,7 +118,6 @@ setup: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.0.type: "keyword"} @@ -140,7 +139,6 @@ setup: esql.query: body: query: 'from test | sort count | limit 1' - version: 2024.04.01 - match: {columns.1.name: "count"} - match: {columns.1.type: "long"} @@ -153,7 +151,6 @@ setup: body: query: 'from test | keep data | sort data | limit 2' columnar: true - version: 2024.04.01 - match: {columns.0.name: "data"} - match: {columns.0.type: "long"} @@ -165,7 +162,6 @@ setup: esql.query: body: query: 'from test | eval x = count + 7 | sort x | limit 1' - version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.1.name: "count"} @@ -183,7 +179,6 @@ setup: esql.query: body: query: 'from test | sort time | eval x = data + 1, y = data_d + count, z = x + y | keep data, x, y, z, time | limit 2' - version: 2024.04.01 - match: {columns.0.name: "data"} - match: {columns.0.type: "long"} @@ -214,7 +209,6 @@ setup: body: query: 'from test | sort time | limit 2 | keep count' columnar: true - version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "count"} @@ -228,7 +222,6 @@ setup: body: query: 'from test | sort time desc | limit 2 | keep count' 
columnar: true - version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "count"} @@ -242,7 +235,6 @@ setup: body: query: 'from test | sort time | limit 2 | keep count | eval x = count + 1' columnar: true - version: 2024.04.01 - length: {columns: 2} - match: {columns.0.name: "count"} @@ -260,7 +252,6 @@ setup: body: query: 'from test | sort time | limit 2 | keep count | eval x = count + 1 | keep x' columnar: true - version: 2024.04.01 - length: {columns: 1} - match: {columns.0.name: "x"} @@ -274,7 +265,6 @@ setup: esql.query: body: query: 'from test | limit 10 | sort time | limit 1' - version: 2024.04.01 - length: {columns: 6} - length: {values: 1} @@ -288,7 +278,6 @@ setup: body: query: 'row a = ? | eval b = ?, c = 1 + ?' params: ["foo", 15, 10] - version: 2024.04.01 - length: {columns: 3} - match: {columns.0.name: "a"} @@ -308,7 +297,6 @@ setup: body: query: 'from test | where color == ? and count == ? and time == ? | keep data, count, color' params: ["green", 44, 1674835275193] - version: 2024.04.01 - length: {columns: 3} - match: {columns.0.name: "data"} @@ -327,7 +315,6 @@ setup: body: query: 'from test | eval x = ?, y = ?, z = ?, t = ?, u = ?, v = ? 
| keep x, y, z, t, u, v | limit 3' params: [{"value": 1, "type": "keyword"}, {"value": 2, "type": "double"}, null, true, 123, {"value": 123, "type": "long"}] - version: 2024.04.01 - length: {columns: 6} - match: {columns.0.name: "x"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml index f6271ab02b816..a18dbba1abfab 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml @@ -122,7 +122,6 @@ row wise and keep null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: false - version: 2024.04.01 - length: {columns: 8} - match: {columns.0.name: "always_null"} @@ -154,7 +153,6 @@ row wise and drop null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: false - version: 2024.04.01 - length: {all_columns: 8} - match: {all_columns.0.name: "always_null"} @@ -198,7 +196,6 @@ columnar and keep null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: true - version: 2024.04.01 - length: {columns: 8} - match: {columns.0.name: "always_null"} @@ -230,7 +227,6 @@ columnar and drop null: body: query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' columnar: true - version: 2024.04.01 - length: {all_columns: 8} - match: {all_columns.0.name: "always_null"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml index ba5de9765db17..3fadd372936a8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_insensitive_equals.yml @@ -46,7 
+46,6 @@ setup: esql.query: body: query: 'FROM test | where keyword =~ keywordUpper | keep id, keyword, keywordUpper' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -66,7 +65,6 @@ setup: esql.query: body: query: 'FROM test | where text =~ textCamel | keep id, text, textCamel' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -87,7 +85,6 @@ setup: esql.query: body: query: 'FROM test | where keyword =~ text | keep id, keyword, text' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -108,7 +105,6 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper =~ textCamel | keep id, keywordUpper, textCamel | sort id' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -130,7 +126,6 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper =~ "fo*" | keep id, keywordUpper' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -145,7 +140,6 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "foo*" | keep id, wildcard' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -161,7 +155,6 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "fOo*" | keep id, wildcard' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -178,7 +171,6 @@ setup: esql.query: body: query: 'FROM test | where keywordUpper =~ "fo?" | keep id, keywordUpper' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -193,7 +185,6 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "bar?" | keep id, wildcard' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -209,7 +200,6 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ "bAr?" 
| keep id, wildcard' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -228,7 +218,6 @@ setup: esql.query: body: query: 'FROM test | where text =~ "Fo*" | keep id, text | sort id' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -243,7 +232,6 @@ setup: esql.query: body: query: 'FROM test | where wildcardText =~ "fOo*" | keep id, wildcardText' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -259,7 +247,6 @@ setup: esql.query: body: query: 'FROM test | where wildcardText =~ "bAr?" | keep id, wildcardText' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -278,7 +265,6 @@ setup: esql.query: body: query: 'FROM test | where text =~ "fo\\*" | keep id, text' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -296,7 +282,6 @@ setup: esql.query: body: query: 'FROM test | where wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } @@ -316,7 +301,6 @@ setup: esql.query: body: query: 'FROM test | where NOT wildcard =~ wildcardText | keep id, wildcard, wildcardText | sort id' - version: 2024.04.01 - match: { columns.0.name: "id" } - match: { columns.0.type: "long" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml index ec415cbfa12d9..17034de677b8d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/120_profile.yml @@ -130,7 +130,6 @@ avg 8.14 or after: query: 'FROM test | STATS AVG(data) | LIMIT 1' columnar: true profile: true - version: 2024.04.01 - match: {columns.0.name: "AVG(data)"} - match: {columns.0.type: 
"double"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml index 2274d5973087d..053d33ee9bf43 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml @@ -97,7 +97,6 @@ geo_point: esql.query: body: query: 'from geo_points | sort id' - version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: location } @@ -115,7 +114,6 @@ geo_point unsortable: esql.query: body: query: 'from geo_points | sort location' - version: 2024.04.01 --- geo_point unsortable with limit: @@ -124,7 +122,6 @@ geo_point unsortable with limit: esql.query: body: query: 'from geo_points | LIMIT 10 | sort location' - version: 2024.04.01 --- geo_point unsortable with limit from row: @@ -133,7 +130,6 @@ geo_point unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) | limit 5 | sort pt' - version: 2024.04.01 --- values unsupported for geo_point: @@ -142,7 +138,6 @@ values unsupported for geo_point: esql.query: body: query: 'FROM geo_points | STATS VALUES(location)' - version: 2024.04.01 --- cartesian_point: @@ -152,7 +147,6 @@ cartesian_point: esql.query: body: query: 'from cartesian_points | sort id' - version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: location } @@ -170,7 +164,6 @@ cartesian_point unsortable: esql.query: body: query: 'from cartesian_points | sort location' - version: 2024.04.01 --- cartesian_point unsortable with limit: @@ -179,7 +172,6 @@ cartesian_point unsortable with limit: esql.query: body: query: 'from cartesian_points | LIMIT 10 | sort location' - version: 2024.04.01 --- cartesian_point unsortable 
with limit from row: @@ -188,7 +180,6 @@ cartesian_point unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) | limit 5 | sort pt' - version: 2024.04.01 --- geo_shape: @@ -198,7 +189,6 @@ geo_shape: esql.query: body: query: 'from geo_shapes | sort id' - version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: shape } @@ -216,7 +206,6 @@ geo_shape unsortable: esql.query: body: query: 'from geo_shapes | sort shape' - version: 2024.04.01 --- geo_shape unsortable with limit: @@ -225,7 +214,6 @@ geo_shape unsortable with limit: esql.query: body: query: 'from geo_shapes | LIMIT 10 | sort shape' - version: 2024.04.01 --- geo_shape unsortable with limit from row: @@ -234,7 +222,6 @@ geo_shape unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL shape = TO_GEOSHAPE(wkt) | limit 5 | sort shape' - version: 2024.04.01 --- cartesian_shape: @@ -244,7 +231,6 @@ cartesian_shape: esql.query: body: query: 'from cartesian_shapes | sort id' - version: 2024.04.01 - match: { columns.0.name: id } - match: { columns.0.type: integer } - match: { columns.1.name: shape } @@ -262,7 +248,6 @@ cartesian_shape unsortable: esql.query: body: query: 'from cartesian_shapes | sort shape' - version: 2024.04.01 --- cartesian_shape unsortable with limit: @@ -271,7 +256,6 @@ cartesian_shape unsortable with limit: esql.query: body: query: 'from cartesian_shapes | LIMIT 10 | sort shape' - version: 2024.04.01 --- cartesian_shape unsortable with limit from row: @@ -280,4 +264,3 @@ cartesian_shape unsortable with limit from row: esql.query: body: query: 'ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL shape = TO_CARTESIANSHAPE(wkt) | limit 5 | sort shape' - version: 2024.04.01 diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 79c4cdde43429..d6c1c6c97944a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -41,7 +41,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | where case == "ok" | limit 2 | keep integer, keyword, _ignored' - version: 2024.04.01 - length: { columns: 3 } - match: { columns.0.name: integer } @@ -62,7 +61,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | where case == "integer_ignored" | limit 2 | keep integer, keyword, _ignored' - version: 2024.04.01 - length: { columns: 3 } - match: { columns.0.name: integer } @@ -83,7 +81,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | where case == "all_ignored" | limit 2 | keep integer, keyword, _ignored' - version: 2024.04.01 - length: { columns: 3 } - match: { columns.0.name: integer } @@ -104,7 +101,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | where _ignored == "keyword" | limit 3 | stats count(*)' - version: 2024.04.01 - length: { columns: 1 } - length: { values: 1 } @@ -118,7 +114,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | limit 3 | stats count = count(*) by _ignored' - version: 2024.04.01 - length: { columns: 2 } - length: { values: 3 } @@ -139,7 +134,6 @@ setup: esql.query: body: query: 'from test metadata _ignored | limit 3 | stats count_distinct(_ignored)' - version: 2024.04.01 - length: { columns: 1 } - length: { values: 1 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml index 69a9213980f98..672dfa1503c40 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/20_aggs.yml @@ -120,7 +120,6 @@ setup: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: "color"} - match: {columns.0.type: "keyword"} @@ -147,7 +146,6 @@ setup: body: query: 'from test | where color == "red" | stats avg(data) by color' columnar: true - version: 2024.04.01 - match: {columns.0.name: "avg(data)"} - match: {columns.0.type: "double"} @@ -164,7 +162,6 @@ setup: body: query: 'from test | stats avg(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "avg(count)"} - match: {columns.0.type: "double"} @@ -179,7 +176,6 @@ setup: body: query: 'from test | stats f1 = avg(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "f1"} - match: {columns.0.type: "double"} @@ -194,7 +190,6 @@ setup: body: query: 'from test | stats count(data)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "count(data)"} - match: {columns.0.type: "long"} @@ -209,7 +204,6 @@ setup: body: query: 'from test | stats dataCount = count(data)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "dataCount"} - match: {columns.0.type: "long"} @@ -224,7 +218,6 @@ setup: body: query: 'from test | stats min(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "min(count)"} - match: {columns.0.type: "long"} @@ -239,7 +232,6 @@ setup: body: query: 'from test | stats minCount=min(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "minCount"} - match: {columns.0.type: "long"} @@ -254,7 +246,6 @@ setup: body: query: 'from test | stats max(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "max(count)"} - match: {columns.0.type: "long"} @@ -269,7 +260,6 @@ setup: body: query: 'from test | stats maxCount=max(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "maxCount"} - match: {columns.0.type: "long"} @@ -282,7 +272,6 @@ setup: body: query: 'from test | stats 
avg(count) by color | sort color | limit 2' columnar: true - version: 2024.04.01 - match: {columns.0.name: "avg(count)"} - match: {columns.0.type: "double"} @@ -300,7 +289,6 @@ setup: body: query: 'from test | stats med=median(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -315,7 +303,6 @@ setup: body: query: 'from test | stats med=median(count_d)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -330,7 +317,6 @@ setup: body: query: 'from test | stats med=median(count) by color | sort med' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -348,7 +334,6 @@ setup: body: query: 'from test | stats med=median(count_d) by color | sort med' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -366,7 +351,6 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -381,7 +365,6 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count_d)' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -396,7 +379,6 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count) by color | sort color' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -414,7 +396,6 @@ setup: body: query: 'from test | stats med=median_absolute_deviation(count_d) by color | sort color' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} @@ -431,7 +412,6 @@ setup: esql.query: body: query: 'from test | stats avg_count = avg(count) | eval x = avg_count + 7' - version: 2024.04.01 - length: {values: 1} - length: {values.0: 2} @@ -445,7 
+425,6 @@ setup: esql.query: body: query: 'from test | stats x = avg(count) | where x > 100' - version: 2024.04.01 - length: {values: 0} @@ -455,7 +434,6 @@ setup: esql.query: body: query: 'from test | eval nullsum = count_d + null | sort nullsum | limit 1' - version: 2024.04.01 - length: {columns: 8} - length: {values: 1} @@ -471,7 +449,6 @@ setup: esql.query: body: query: 'row a = 1, b = 2, c = null | eval z = c + b + a' - version: 2024.04.01 - length: {columns: 4} - length: {values: 1} @@ -497,7 +474,6 @@ setup: esql.query: body: query: 'from test | eval nullsum = count_d + null | stats count(nullsum)' - version: 2024.04.01 - length: {columns: 1} - length: {values: 1} @@ -514,7 +490,6 @@ setup: esql.query: body: query: 'row l=1, d=1.0, ln=1 + null, dn=1.0 + null | stats sum(l), sum(d), sum(ln), sum(dn)' - version: 2024.04.01 - length: {columns: 4} - length: {values: 1} @@ -541,7 +516,6 @@ grouping on text: body: query: 'FROM test | STATS med=median(count) BY text | SORT med' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml index 1980ed8bb040c..0684939932774 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/25_aggs_on_null.yml @@ -39,7 +39,6 @@ group on null: body: query: 'FROM test | STATS med=median(never_null) BY always_null | LIMIT 1' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "always_null"} @@ -55,7 +54,6 @@ group on null, long: body: query: 'FROM test | STATS med=median(sometimes_null) BY always_null, never_null | SORT always_null, never_null | LIMIT 10' columnar: true - version: 2024.04.01 - match: {columns.0.name: 
"med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "always_null"} @@ -74,7 +72,6 @@ agg on null: body: query: 'FROM test | STATS med=median(always_null) | LIMIT 1' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - length: {values: 1} @@ -88,7 +85,6 @@ agg on missing: body: query: 'FROM test | STATS med=median(missing) | LIMIT 1' columnar: true - version: 2024.04.01 --- group on missing: @@ -98,7 +94,6 @@ group on missing: body: query: 'FROM test | STATS med=median(never_null) BY missing | LIMIT 1' columnar: true - version: 2024.04.01 --- agg on half missing: @@ -124,7 +119,6 @@ agg on half missing: body: query: 'FROM test* | STATS med=median(missing) | LIMIT 1' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - length: {values: 1} @@ -154,7 +148,6 @@ group on half missing: body: query: 'FROM test,test2 | STATS med=median(never_null) BY missing | LIMIT 1' columnar: true - version: 2024.04.01 - match: {columns.0.name: "med"} - match: {columns.0.type: "double"} - match: {columns.1.name: "missing"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml index bbf8b33445fa3..cfc7f2e4036fb 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml @@ -35,7 +35,6 @@ constant_keyword: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: color } - match: { columns.0.type: keyword } - match: { columns.1.name: kind } @@ -50,7 +49,6 @@ constant_keyword: esql.query: body: query: 'from test | eval l=length(kind) | keep l' - version: 2024.04.01 - match: {columns.0.name: l} - match: {columns.0.type: integer} - length: {values: 1} @@ -81,7 +79,6 @@ constant_keyword with null value: 
esql.query: body: query: 'from test | limit 1' - version: 2024.04.01 - match: { columns.0.name: color } - match: { columns.0.type: keyword } - match: { columns.1.name: kind } @@ -115,7 +112,6 @@ multivalued keyword: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -147,7 +143,6 @@ keyword no doc_values: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -178,7 +173,6 @@ wildcard: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} @@ -190,7 +184,6 @@ wildcard: esql.query: body: query: 'from test | eval l=length(card) | keep l' - version: 2024.04.01 - match: {columns.0.name: l} - match: {columns.0.type: integer} - length: {values: 1} @@ -231,7 +224,6 @@ numbers: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: d} - match: {columns.0.type: double} - match: {columns.1.name: i} @@ -283,7 +275,6 @@ small_numbers: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: b} - match: {columns.0.type: integer} - match: {columns.1.name: f} @@ -304,7 +295,6 @@ small_numbers: esql.query: body: query: 'from test | eval sum_d = b + f + hf + s, sum_i = b + s | keep sum_d, sum_i' - version: 2024.04.01 - match: {columns.0.name: sum_d} - match: {columns.0.type: double} - match: {columns.1.name: sum_i} @@ -319,7 +309,6 @@ small_numbers: esql.query: body: query: 'from test | eval r_f = round(f), r_hf = round(hf) | keep r_f, r_hf' - version: 2024.04.01 - match: {columns.0.name: r_f} - match: {columns.0.type: double} - match: {columns.1.name: r_hf} @@ -356,7 +345,6 @@ scaled_float: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: d} - match: {columns.0.type: double} - match: {columns.1.name: f} 
@@ -371,7 +359,6 @@ scaled_float: esql.query: body: query: 'from test | eval sum = d + f | keep sum' - version: 2024.04.01 - match: {columns.0.name: sum} - match: {columns.0.type: double} - length: {values: 1} @@ -402,7 +389,6 @@ multivalued boolean: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: booleans } - match: { columns.0.type: boolean } - length: { values: 1 } @@ -435,7 +421,6 @@ ip: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: ip } - match: { columns.0.type: ip } - match: { columns.1.name: keyword } @@ -450,7 +435,6 @@ ip: esql.query: body: query: 'from test | where keyword == "127.0.0.2" | rename ip as IP | drop keyword' - version: 2024.04.01 - match: {columns.0.name: IP } - match: {columns.0.type: ip } - length: {values: 1 } @@ -506,7 +490,6 @@ alias: esql.query: body: query: 'from test | keep foo, bar, level1.level2, level2_alias, some_long, some_long_alias, some_long_alias2, some_date, some_date_alias | sort level2_alias' - version: 2024.04.01 - match: { columns.0.name: foo } - match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -551,7 +534,6 @@ alias: esql.query: body: query: 'from test | where bar == "abc" | keep foo, bar, level1.level2, level2_alias' - version: 2024.04.01 - match: { columns.0.name: foo } - match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -572,7 +554,6 @@ alias: esql.query: body: query: 'from test | where level2_alias == 10 | keep foo, bar, level1.level2, level2_alias' - version: 2024.04.01 - match: { columns.0.name: foo } - match: { columns.0.type: keyword } - match: { columns.1.name: bar } @@ -593,7 +574,6 @@ alias: esql.query: body: query: 'from test | where level2_alias == 20' - version: 2024.04.01 - length: { values: 0 } - do: @@ -602,7 +582,6 @@ alias: esql.query: body: query: 'from test | stats x = max(level2_alias)' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: long } - 
length: { values: 1 } @@ -633,7 +612,6 @@ version: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: version } - match: { columns.0.type: version } - length: { values: 1 } @@ -667,7 +645,6 @@ id: esql.query: body: query: 'from test metadata _id | keep _id, kw' - version: 2024.04.01 - match: { columns.0.name: _id } - match: { columns.0.type: keyword } - length: { values: 1 } @@ -699,7 +676,6 @@ unsigned_long: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: number } - match: { columns.0.type: unsigned_long } - length: { values: 1 } @@ -723,7 +699,6 @@ _source: esql.query: body: query: 'FROM test METADATA _source | KEEP _source | LIMIT 1' - version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -759,7 +734,6 @@ _source keep all: esql.query: body: query: 'FROM test METADATA _source | LIMIT 1' - version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -796,7 +770,6 @@ _source disabled: esql.query: body: query: 'FROM test METADATA _source | KEEP _source | LIMIT 1' - version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } @@ -825,7 +798,6 @@ text: esql.query: body: query: 'FROM test | LIMIT 1' - version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: text} - length: {values: 1} @@ -857,7 +829,6 @@ synthetic _source text stored: esql.query: body: query: 'FROM test | LIMIT 1' - version: 2024.04.01 - match: {columns.0.name: card} - match: {columns.0.type: text} - length: {values: 1} @@ -891,7 +862,6 @@ synthetic _source text with parent keyword: esql.query: body: query: 'FROM test | KEEP card.text | LIMIT 1' - version: 2024.04.01 - match: {columns.0.name: card.text} - match: {columns.0.type: text} - length: {values: 1} @@ -925,7 +895,6 @@ geo_point: esql.query: body: query: 'from test' - 
version: 2024.04.01 - match: { columns.0.name: location } - match: { columns.0.type: geo_point } - length: { values: 1 } @@ -959,7 +928,6 @@ cartesian_point: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: location } - match: { columns.0.type: cartesian_point } - length: { values: 1 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index c09bc17ab9a5c..8f291600acbf6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -116,7 +116,6 @@ load everything: esql.query: body: query: 'from test metadata _id' - version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -142,7 +141,6 @@ load a document: esql.query: body: query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' - version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} @@ -161,7 +159,6 @@ filter on counter without cast: esql.query: body: query: 'from test | where k8s.pod.network.tx == 1434577921' - version: 2024.04.01 --- cast counter then filter: @@ -169,7 +166,6 @@ cast counter then filter: esql.query: body: query: 'from test | where k8s.pod.network.tx::long == 2005177954 and k8s.pod.network.rx::integer == 801479970 | sort @timestamp | limit 10' - version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} - match: {values.0.0: "2021-04-28T18:50:24.467Z"} @@ -187,7 +183,6 @@ sort on counter without cast: esql.query: body: query: 'from test | KEEP k8s.pod.network.tx | sort @k8s.pod.network.tx | limit 1' - version: 2024.04.01 --- cast then sort on counter: @@ -195,7 +190,6 @@ cast then sort on counter: esql.query: body: query: 'from test | KEEP k8s.pod.network.tx | EVAL tx=to_long(k8s.pod.network.tx) | sort tx | limit 1' - version: 2024.04.01 - length: {values: 1} - 
match: {values.0.0: 1434521831 } @@ -207,7 +201,6 @@ from doc with aggregate_metric_double: esql.query: body: query: 'from test2' - version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -228,7 +221,6 @@ stats on aggregate_metric_double: esql.query: body: query: 'FROM test2 | STATS max(agg_metric) BY dim' - version: 2024.04.01 --- from index pattern unsupported counter: @@ -238,7 +230,6 @@ from index pattern unsupported counter: esql.query: body: query: 'FROM test*' - version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -267,7 +258,6 @@ from index pattern explicit counter use: esql.query: body: query: 'FROM test* | keep *.tx' - version: 2024.04.01 --- @@ -288,7 +278,6 @@ _source: esql.query: body: query: 'FROM test METADATA _source | WHERE @timestamp == "2021-04-28T18:50:23.142Z" | KEEP _source | LIMIT 1' - version: 2024.04.01 - match: { columns.0.name: _source } - match: { columns.0.type: _source } - length: { values: 1 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index 1ff0b8763c2eb..c34666bb12b02 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -120,7 +120,6 @@ unsupported: esql.query: body: query: 'from test' - version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } @@ -218,7 +217,6 @@ unsupported: esql.query: body: query: 'from test | limit 0' - version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } - match: { columns.1.name: binary } @@ -285,7 +283,6 @@ unsupported: esql.query: body: query: 'from test | keep histogram | limit 0' - version: 2024.04.01 - match: { 
columns.0.name: histogram } - match: { columns.0.type: unsupported } - length: { values: 0 } @@ -303,7 +300,6 @@ unsupported with sort: esql.query: body: query: 'from test | sort some_doc.bar' - version: 2024.04.01 - match: { columns.0.name: aggregate_metric_double } - match: { columns.0.type: unsupported } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml index 7f78ee1c7b099..05ba568838fe4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/45_non_tsdb_counter.yml @@ -66,7 +66,6 @@ load everything: esql.query: body: query: 'from test' - version: 2024.04.01 - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} @@ -92,7 +91,6 @@ load a document: esql.query: body: query: 'from test | where @timestamp == "2021-04-28T18:50:23.142Z"' - version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} @@ -112,7 +110,6 @@ filter on counter: esql.query: body: query: 'from test | where k8s.pod.network.tx == 1434577921' - version: 2024.04.01 - length: {values: 1} - length: {values.0: 7} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml index ff04eec1d1737..d8aad27534433 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/50_index_patterns.yml @@ -51,7 +51,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -68,7 +67,6 @@ disjoint_mappings: esql.query: 
body: query: 'from test1,test2 | keep message1, message2 | sort message1 | limit 2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -83,7 +81,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls last | limit 1' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -98,7 +95,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -117,7 +113,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2 | limit 3' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -135,7 +130,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls first, message2 | limit 3' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -152,7 +146,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1, message2 | limit 2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -169,7 +162,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 nulls first, message2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -190,7 +182,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep 
message1, message2 | sort message1 nulls first, message2 nulls first' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -211,7 +202,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | keep message1, message2 | sort message1 desc nulls first, message2 desc nulls first' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -232,7 +222,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | where message1 == "foo1" | keep message1, message2 | sort message1, message2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -247,7 +236,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | where message1 == "foo1" or message2 == 2 | keep message1, message2 | sort message1, message2' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -264,7 +252,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | stats x = max(message2)' - version: 2024.04.01 - match: { columns.0.name: x } - match: { columns.0.type: long } - length: { values: 1 } @@ -276,7 +263,6 @@ disjoint_mappings: esql.query: body: query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' - version: 2024.04.01 - match: { columns.0.name: message1 } - match: { columns.0.type: keyword } - match: { columns.1.name: message2 } @@ -352,7 +338,6 @@ same_name_different_type: esql.query: body: query: 'from test1,test2' - version: 2024.04.01 - match: { columns.0.name: message } - match: { columns.0.type: unsupported } - length: { values: 4 } @@ -404,7 +389,6 @@ same_name_different_type_same_family: esql.query: body: query: 'from test1,test2 | sort message | keep message' 
- version: 2024.04.01 - match: { columns.0.name: message } - match: { columns.0.type: keyword } - length: { values: 4 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 8fbc4be3cfb3b..8a5d3be6758e3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -103,7 +103,6 @@ teardown: esql.query: body: query: 'from test | enrich city_codes_policy on city_id | keep name, city, country | sort name' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -127,7 +126,6 @@ teardown: esql.query: body: query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country | sort name' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -151,7 +149,6 @@ teardown: esql.query: body: query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country_name = country | sort name' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -179,7 +176,6 @@ teardown: esql.query: body: query: 'from test | keep name, city_name | enrich city_names_policy on city_name | sort name' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 018106cf1aa11..74c0e9ef1bb31 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -52,7 +52,6 @@ setup: esql.query: body: query: 'from test | where data > 2 | sort count desc | limit 5 | stats m = 
max(data)' - version: 2024.04.01 - do: {xpack.usage: {}} - match: { esql.available: true } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index a9ea9c704e6e8..076bf116292d0 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -115,7 +115,6 @@ teardown: esql.query: body: query: 'FROM events | eval ip_str = to_string(ip) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' - version: 2024.04.01 - match: { columns.0.name: "ip" } - match: { columns.0.type: "ip" } @@ -144,7 +143,6 @@ teardown: esql.query: body: query: 'FROM events_text | ENRICH networks-policy ON ip_text | sort @timestamp | KEEP ip_text, name, department, message' - version: 2024.04.01 - match: { columns.0.name: "ip_text" } - match: { columns.0.type: "text" } @@ -172,7 +170,6 @@ teardown: esql.query: body: query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' - version: 2024.04.01 --- "IP": @@ -186,7 +183,6 @@ teardown: esql.query: body: query: 'FROM events | ENRICH networks-policy ON ip | sort @timestamp | KEEP ip, name, department, message' - version: 2024.04.01 - match: { columns.0.name: "ip" } - match: { columns.0.type: "ip" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml index 288c17bac1d16..19b08007fe18a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml @@ -44,7 +44,6 @@ esql.query: body: query: 'ROW name="engineering" | 
ENRICH departments-policy | LIMIT 10 | KEEP name, employees' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -59,7 +58,6 @@ esql.query: body: query: 'ROW name="sales" | ENRICH departments-policy ON name WITH department=name | WHERE name==department | KEEP name, department | LIMIT 10' - version: 2024.04.01 - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -259,7 +257,6 @@ movies: SORT total DESC, title ASC | KEEP total, title | LIMIT 10 - version: 2024.04.01 - match: { columns.0.name: "total" } - match: { columns.0.type: "long" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml index a0ec659b21d0e..e181f77f2bcef 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/70_locale.yml @@ -32,7 +32,6 @@ setup: esql.query: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' - version: 2024.04.01 - match: { columns.0.name: "@timestamp" } - match: { columns.0.type: "date" } @@ -55,7 +54,6 @@ setup: body: query: 'FROM events | eval fixed_format = date_format("MMMM", @timestamp), variable_format = date_format(format, @timestamp) | sort @timestamp | keep @timestamp, fixed_format, variable_format' locale: "it-IT" - version: 2024.04.01 - match: { columns.0.name: "@timestamp" } - match: { columns.0.type: "date" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index c8867b2d1bf88..9607b64385721 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -41,7 +41,6 @@ setup: esql.query: body: query: 'from test | sort emp_no' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -67,7 +66,6 @@ setup: esql.query: body: query: 'from test | where tag == "baz" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -89,7 +87,6 @@ setup: esql.query: body: query: 'from test | where tag LIKE "*az" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -111,7 +108,6 @@ setup: esql.query: body: query: 'from test | where tag RLIKE ".*az" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -137,7 +133,6 @@ setup: esql.query: body: query: 'from test | where tag IN ("abc", "baz") | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -163,7 +158,6 @@ setup: esql.query: body: query: 'from test | where tag IN ("abc", tag) | keep emp_no, name, job, tag | sort emp_no' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -190,7 +184,6 @@ setup: esql.query: body: query: 'from test | where tag NOT IN ("abc", "baz") | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -212,7 +205,6 @@ setup: esql.query: body: query: 'from test | eval x = tag | where x == "baz" | keep emp_no, name, job, x' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -234,7 +226,6 @@ setup: esql.query: body: query: 'from test | where job == "IT Director" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: 
"long" } @@ -256,7 +247,6 @@ setup: esql.query: body: query: 'from test | where job LIKE "*Specialist" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -278,7 +268,6 @@ setup: esql.query: body: query: 'from test | where job RLIKE ".*Specialist" | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -301,7 +290,6 @@ setup: esql.query: body: query: 'from test | sort tag | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -325,7 +313,6 @@ setup: esql.query: body: query: 'from test | sort job | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -348,7 +335,6 @@ setup: esql.query: body: query: 'from test | sort job desc | keep emp_no, name, job, tag' - version: 2024.04.01 - match: { columns.0.name: "emp_no" } - match: { columns.0.type: "long" } @@ -372,7 +358,6 @@ setup: esql.query: body: query: 'from test | sort name | eval description = concat(name, " - ", job) | keep description' - version: 2024.04.01 - match: { columns.0.name: "description" } - match: { columns.0.type: "keyword" } @@ -393,7 +378,6 @@ setup: esql.query: body: query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' - version: 2024.04.01 - match: { columns.0.name: "split" } - match: { columns.0.type: "keyword" } @@ -411,7 +395,6 @@ setup: esql.query: body: query: 'from test | stats jobs = count(job) | keep jobs' - version: 2024.04.01 - match: { columns.0.name: "jobs" } - match: { columns.0.type: "long" } @@ -428,7 +411,6 @@ setup: esql.query: body: query: 'from test | stats tags = count(tag) | keep tags' - version: 2024.04.01 - match: { columns.0.name: "tags" } - match: { columns.0.type: "long" } @@ -445,7 +427,6 @@ setup: esql.query: body: query: 'from test | stats names = 
count(name) by job | keep names' - version: 2024.04.01 - match: { columns.0.name: "names" } - match: { columns.0.type: "long" } @@ -463,7 +444,6 @@ setup: esql.query: body: query: 'from test | stats names = count(name) by tag | keep names' - version: 2024.04.01 - match: { columns.0.name: "names" } - match: { columns.0.type: "long" } @@ -508,7 +488,6 @@ setup: esql.query: body: query: 'from test2 | sort emp_no | keep job' - version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } @@ -552,7 +531,6 @@ setup: esql.query: body: query: 'from test2 | sort emp_no | keep job' - version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } @@ -571,7 +549,6 @@ values: esql.query: body: query: 'FROM test | STATS job = VALUES(job) | EVAL job = MV_SORT(job) | LIMIT 1' - version: 2024.04.01 - match: { columns.0.name: "job" } - match: { columns.0.type: "text" } - length: { values: 1 } @@ -589,7 +566,6 @@ values: esql.query: body: query: 'FROM test | STATS job = VALUES(job) BY tag | EVAL job = MV_SORT(job) | SORT tag | LIMIT 10' - version: 2024.04.01 - match: { columns.0.name: "tag" } - match: { columns.0.type: "text" } - match: { columns.1.name: "job" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml index 20dd668e0f8c3..72e5f4728edc8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/81_text_exact_subfields.yml @@ -57,7 +57,6 @@ setup: esql.query: body: query: 'from test | sort emp_no | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -84,7 +83,6 @@ setup: esql.query: 
body: query: 'from test | where text_ignore_above == "this" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -108,7 +106,6 @@ setup: esql.query: body: query: 'from test | where text_ignore_above == "this is a long text" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -133,7 +130,6 @@ setup: esql.query: body: query: 'from test | where text_ignore_above is null | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -157,7 +153,6 @@ setup: esql.query: body: query: 'from test | where text_ignore_above is not null | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -181,7 +176,6 @@ setup: esql.query: body: query: 'from test | where text_ignore_above LIKE "*long*" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -207,7 +201,6 @@ setup: esql.query: body: query: 'from test | where text_normalizer == "CamelCase" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -232,7 +225,6 @@ setup: esql.query: body: query: 'from test | where 
text_normalizer == text_normalizer.raw | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -258,7 +250,6 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -283,7 +274,6 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above desc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -308,7 +298,6 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc nulls first | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -333,7 +322,6 @@ setup: esql.query: body: query: 'from test | sort text_ignore_above asc nulls last | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -360,7 +348,6 @@ setup: esql.query: body: query: 'from test | sort text_normalizer asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -385,7 +372,6 @@ setup: esql.query: body: query: 'from test | sort text_normalizer desc | keep text_ignore_above, 
text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -410,7 +396,6 @@ setup: esql.query: body: query: 'from test | sort text_normalizer.raw asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -438,7 +423,6 @@ setup: esql.query: body: query: 'from test | sort non_indexed asc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -463,7 +447,6 @@ setup: esql.query: body: query: 'from test | sort non_indexed desc | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } @@ -488,7 +471,6 @@ setup: esql.query: body: query: 'from test | where non_indexed == "foo" | keep text_ignore_above, text_ignore_above.raw, text_normalizer, text_normalizer.raw, non_indexed, non_indexed.raw' - version: 2024.04.01 - match: { columns.0.name: "text_ignore_above" } - match: { columns.0.type: "text" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml index 86ff9626e0077..f69854388baf3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/90_non_indexed.yml @@ -102,7 +102,6 @@ fetch: esql.query: body: query: 'from test' - version: 2024.04.01 - length: { columns: 18 } - match: { columns.0.name: 
boolean } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml index 5afb2bf05b2aa..75823d22504f3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/sparse_vector_search.yml @@ -13,6 +13,8 @@ setup: indices.create: index: index-with-sparse-vector body: + settings: + number_of_shards: 1 mappings: properties: source_text: @@ -66,17 +68,17 @@ setup: index: index-with-sparse-vector refresh: true body: | - {"index": {}} + {"index": { "_id": 1 }} {"source_text": "my words comforter", "ml.tokens":{"my":0.5,"words":1.0,"comforter":2.0}} - {"index": {}} + {"index": { "_id": 2 }} {"source_text": "the machine is leaking", "ml.tokens":{"the":0.5,"machine":1.0,"is":0.5,"leaking":1.0}} - {"index": {}} + {"index": { "_id": 3 }} {"source_text": "these are my words", "ml.tokens":{"these":0.5,"are":0.5,"my":0.5,"words":1.0}} - {"index": {}} + {"index": { "_id": 4 }} {"source_text": "the octopus comforter smells", "ml.tokens":{"the":0.5,"octopus":2.0,"comforter":2.0,"smells":1.0}} - {"index": {}} + {"index": { "_id": 5 }} {"source_text": "the octopus comforter is leaking", "ml.tokens":{"the":0.5,"octopus":2.0,"comforter":2.0,"is":0.5,"leaking":1.0}} - {"index": {}} + {"index": { "_id": 6 }} {"source_text": "washing machine smells", "ml.tokens":{"washing":1.0,"machine":1.0,"smells":1.0}} - do: @@ -117,7 +119,7 @@ setup: - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- -"Test named, boosted sparse_vector search with pruning config": +"Test named, boosted sparse_vector search with pruning config - note this will not change returned results due to model limitations": - do: search: index: index-with-sparse-vector @@ -150,7 +152,7 @@ setup: - match: { hits.hits.0._score: 500.0 } --- -"Test sparse_vector 
search with specified pruning config": +"Test sparse_vector search with specified pruning config - note default values will not change returned results due to model limitations": - do: search: index: index-with-sparse-vector @@ -169,7 +171,7 @@ setup: - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- -"Test sparse_vector search with default pruning config specified": +"Test sparse_vector search with default pruning config specified - note this will not change returned results due to model limitations": - do: search: index: index-with-sparse-vector @@ -241,7 +243,6 @@ setup: only_score_pruned_tokens: false - match: { hits.total.value: 3 } - - match: { hits.hits.0._source.source_text: "my words comforter" } - match: { hits.hits.0._score: 4 } --- @@ -264,7 +265,6 @@ setup: tokens_weight_threshold: 0.4 only_score_pruned_tokens: true - match: { hits.total.value: 3 } - - match: { hits.hits.0._source.source_text: "the machine is leaking" } - match: { hits.hits.0._score: 0.25 } --- diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml new file mode 100644 index 0000000000000..569c310c3b244 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/140_synthetic_source.yml @@ -0,0 +1,251 @@ +--- +"geo_shape": + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + shape: + type: geo_shape + + - do: + index: + index: test + id: "1" + body: + shape: + type: "Polygon" + coordinates: [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]] + + - do: + index: + index: test + id: "2" + body: + shape: "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" + + - do: + index: 
+ index: test + id: "3" + body: + shape: ["POINT (-77.03653 38.897676)", {"type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]}] + + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.shape.type: "Polygon" } + - match: { _source.shape.coordinates: [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]] } + + - do: + get: + index: test + id: "2" + + - match: { _source.shape: "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" } + + - do: + get: + index: test + id: "3" + + - match: { _source.shape: ["POINT (-77.03653 38.897676)", {"type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]}] } + +--- +"geo_shape with ignore_malformed": + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + shape: + type: geo_shape + ignore_malformed: true + + - do: + index: + index: test + id: "1" + body: + shape: 500 + + - do: + index: + index: test + id: "2" + body: + shape: + string: "string" + array: [{ "a": 1 }, { "b": 2 }] + object: { "foo": "bar" } + + - do: + index: + index: test + id: "3" + body: + shape: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] + + - do: + index: + index: test + id: "4" + body: + shape: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] + + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.shape: 500 } + + - do: + get: + index: test + id: "2" + + - match: { _source.shape.string: "string" } + - match: { _source.shape.array: [{ "a": 1 }, { "b": 2 }] } + - match: { _source.shape.object: { "foo": "bar" } } + + - do: + get: + index: test + id: "3" + + - match: { _source.shape: ["POINT (-77.03653 38.897676)", "potato", "POINT (-71.34 41.12)"] } + + - do: + get: + index: test + id: 
"4" + + - match: { _source.shape: ["POINT (-77.03653 1000)", "POINT (-71.34 41.12)"] } + +--- +"geo_point": + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + point: + type: geo_point + + - do: + index: + index: test + id: "1" + body: + point: + type: "Point" + coordinates: [-71.34, 41.12] + + - do: + index: + index: test + id: "2" + body: + point: "POINT (-71.34 41.12)" + + - do: + index: + index: test + id: "3" + body: + point: + lat: 41.12 + lon: -71.34 + + - do: + index: + index: test + id: "4" + body: + point: [ -71.34, 41.12 ] + + - do: + index: + index: test + id: "5" + body: + point: "41.12,-71.34" + + - do: + index: + index: test + id: "6" + body: + point: "drm3btev3e86" + + + - do: + indices.refresh: {} + + - do: + get: + index: test + id: "1" + + - match: { _source.point.lon: -71.34000004269183 } + - match: { _source.point.lat: 41.1199999647215 } + + - do: + get: + index: test + id: "2" + + - match: { _source.point.lon: -71.34000004269183 } + - match: { _source.point.lat: 41.1199999647215 } + + - do: + get: + index: test + id: "3" + + - match: { _source.point.lon: -71.34000004269183 } + - match: { _source.point.lat: 41.1199999647215 } + + - do: + get: + index: test + id: "4" + + - match: { _source.point.lon: -71.34000004269183 } + - match: { _source.point.lat: 41.1199999647215 } + + - do: + get: + index: test + id: "5" + + - match: { _source.point.lon: -71.34000004269183 } + - match: { _source.point.lat: 41.1199999647215 } + + - do: + get: + index: test + id: "6" + + - match: { _source.point.lon: -71.34000029414892 } + - match: { _source.point.lat: 41.119999922811985 } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 5b6d0f5dbe608..e788a85562e8e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -321,7 +321,7 @@ private void reportClashingNodeAttribute(String attrName) { @Override public void close() { if (transformServices.get() != null) { - transformServices.get().getScheduler().stop(); + transformServices.get().scheduler().stop(); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java index 4b0179a56d6f1..9a7db0fde2d9c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformServices.java @@ -16,18 +16,17 @@ /** * Holder for all transform services that need to get injected via guice. - * + *

* Needed because interfaces can not be injected. * Note: Guice will be removed in the long run. */ -public final class TransformServices { - - private final TransformConfigManager configManager; - private final TransformCheckpointService checkpointService; - private final TransformAuditor auditor; - private final TransformScheduler scheduler; - private final TransformNode transformNode; - +public record TransformServices( + TransformConfigManager configManager, + TransformCheckpointService checkpointService, + TransformAuditor auditor, + TransformScheduler scheduler, + TransformNode transformNode +) { public TransformServices( TransformConfigManager configManager, TransformCheckpointService checkpointService, @@ -39,26 +38,6 @@ public TransformServices( this.checkpointService = Objects.requireNonNull(checkpointService); this.auditor = Objects.requireNonNull(auditor); this.scheduler = Objects.requireNonNull(scheduler); - this.transformNode = transformNode; - } - - public TransformConfigManager getConfigManager() { - return configManager; - } - - public TransformCheckpointService getCheckpointService() { - return checkpointService; - } - - public TransformAuditor getAuditor() { - return auditor; - } - - public TransformScheduler getScheduler() { - return scheduler; - } - - public TransformNode getTransformNode() { - return transformNode; + this.transformNode = Objects.requireNonNull(transformNode); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index dc3f8a514916b..6f26df549efc7 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -73,8 +73,8 @@ public 
TransportDeleteTransformAction( indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); - this.auditor = transformServices.getAuditor(); + this.transformConfigManager = transformServices.configManager(); + this.auditor = transformServices.auditor(); this.client = client; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java index e4a346833add8..302db8816f4bf 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java @@ -91,8 +91,8 @@ public TransportGetTransformStatsAction( Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); - this.transformCheckpointService = transformServices.getCheckpointService(); + this.transformConfigManager = transformServices.configManager(); + this.transformCheckpointService = transformServices.checkpointService(); this.client = client; this.nodeSettings = settings; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index b802a6522f367..4c978b1504a0f 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -81,11 +81,11 @@ public TransportPutTransformAction( ); this.settings = settings; this.client = client; - 
this.transformConfigManager = transformServices.getConfigManager(); + this.transformConfigManager = transformServices.configManager(); this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? new SecurityContext(settings, threadPool.getThreadContext()) : null; - this.auditor = transformServices.getAuditor(); + this.auditor = transformServices.auditor(); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index 897ed5c52ec1f..473aafb6efa91 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -84,8 +84,8 @@ public TransportResetTransformAction( indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); - this.auditor = transformServices.getAuditor(); + this.transformConfigManager = transformServices.configManager(); + this.auditor = transformServices.auditor(); this.client = Objects.requireNonNull(client); this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? 
new SecurityContext(settings, threadPool.getThreadContext()) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java index ad03632ec0975..4c0fb58390a1d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportScheduleNowTransformAction.java @@ -64,8 +64,8 @@ public TransportScheduleNowTransformAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); - this.transformScheduler = transformServices.getScheduler(); + this.transformConfigManager = transformServices.configManager(); + this.transformScheduler = transformServices.scheduler(); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java index 20902255c0297..23212636dc33c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStartTransformAction.java @@ -117,10 +117,10 @@ protected TransportStartTransformAction( StartTransformAction.Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); + this.transformConfigManager = transformServices.configManager(); this.persistentTasksService = persistentTasksService; this.client = client; - this.auditor = transformServices.getAuditor(); + this.auditor = transformServices.auditor(); this.destIndexSettings = 
transformExtensionHolder.getTransformExtension().getTransformDestinationIndexSettings(); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index 5ae3ccdbb4354..39874a9b4f9fc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -90,7 +90,7 @@ public TransportStopTransformAction( EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.threadPool = threadPool; - this.transformConfigManager = transformServices.getConfigManager(); + this.transformConfigManager = transformServices.configManager(); this.persistentTasksService = persistentTasksService; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index e8790407f65a6..f254294cd104c 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -92,11 +92,11 @@ public TransportUpdateTransformAction( this.settings = settings; this.client = client; - this.transformConfigManager = transformServices.getConfigManager(); + this.transformConfigManager = transformServices.configManager(); this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? 
new SecurityContext(settings, threadPool.getThreadContext()) : null; - this.auditor = transformServices.getAuditor(); + this.auditor = transformServices.auditor(); this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.destIndexSettings = transformExtensionHolder.getTransformExtension().getTransformDestinationIndexSettings(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java index eac61dd7d9528..2d0ec21eaee60 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java @@ -83,11 +83,11 @@ public TransportUpgradeTransformsAction( Response::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); - this.transformConfigManager = transformServices.getConfigManager(); + this.transformConfigManager = transformServices.configManager(); this.settings = settings; this.client = client; - this.auditor = transformServices.getAuditor(); + this.auditor = transformServices.auditor(); this.indexNameExpressionResolver = indexNameExpressionResolver; this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? 
new SecurityContext(settings, threadPool.getThreadContext()) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerBuilder.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerBuilder.java index 815fc66694ea2..6dcf2657b12a8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerBuilder.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerBuilder.java @@ -45,7 +45,7 @@ class ClientTransformIndexerBuilder { } ClientTransformIndexer build(ThreadPool threadPool, TransformContext context) { - CheckpointProvider checkpointProvider = transformServices.getCheckpointService() + CheckpointProvider checkpointProvider = transformServices.checkpointService() .getCheckpointProvider(parentTaskClient, transformConfig); return new ClientTransformIndexer( diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 914a302ff096a..c2fb0f12dc53b 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -152,9 +152,9 @@ public TransformIndexer( // important: note that we pass the context object as lock object super(threadPool, initialState, initialPosition, jobStats, context); ExceptionsHelper.requireNonNull(transformServices, "transformServices"); - this.transformsConfigManager = transformServices.getConfigManager(); + this.transformsConfigManager = transformServices.configManager(); this.checkpointProvider = ExceptionsHelper.requireNonNull(checkpointProvider, "checkpointProvider"); - this.auditor = 
transformServices.getAuditor(); + this.auditor = transformServices.auditor(); this.transformConfig = ExceptionsHelper.requireNonNull(transformConfig, "transformConfig"); this.progress = transformProgress != null ? transformProgress : new TransformProgress(); this.lastCheckpoint = ExceptionsHelper.requireNonNull(lastCheckpoint, "lastCheckpoint"); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index eb1e5034c4940..279c59b8b712d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -98,7 +98,7 @@ public TransformPersistentTasksExecutor( this.threadPool = threadPool; this.clusterService = clusterService; this.resolver = resolver; - this.auditor = transformServices.getAuditor(); + this.auditor = transformServices.auditor(); this.numFailureRetries = Transform.NUM_FAILURE_RETRIES_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(Transform.NUM_FAILURE_RETRIES_SETTING, this::setNumFailureRetries); this.transformExtension = transformExtension; @@ -253,7 +253,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa indexerBuilder.setLastCheckpoint(lastCheckpoint); logger.trace("[{}] Loaded last checkpoint [{}], looking for next checkpoint", transformId, lastCheckpoint.getCheckpoint()); - transformServices.getConfigManager() + transformServices.configManager() .getTransformCheckpoint(transformId, lastCheckpoint.getCheckpoint() + 1, getTransformNextCheckpointListener); }, error -> { String msg = TransformMessages.getMessage(TransformMessages.FAILED_TO_LOAD_TRANSFORM_CHECKPOINT, transformId); @@ -291,11 
+291,11 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa if (lastCheckpoint == 0) { logger.trace("[{}] No last checkpoint found, looking for next checkpoint", transformId); - transformServices.getConfigManager() + transformServices.configManager() .getTransformCheckpoint(transformId, lastCheckpoint + 1, getTransformNextCheckpointListener); } else { logger.trace("[{}] Restore last checkpoint: [{}]", transformId, lastCheckpoint); - transformServices.getConfigManager() + transformServices.configManager() .getTransformCheckpoint(transformId, lastCheckpoint, getTransformLastCheckpointListener); } }, @@ -331,7 +331,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa ValidationException validationException = config.validate(null); if (validationException == null) { indexerBuilder.setTransformConfig(config); - transformServices.getConfigManager().getTransformStoredDoc(transformId, false, transformStatsActionListener); + transformServices.configManager().getTransformStoredDoc(transformId, false, transformStatsActionListener); } else { auditor.error(transformId, validationException.getMessage()); markAsFailed( @@ -409,12 +409,12 @@ private ActionListener getTransformConfig( var transformId = params.getId(); // if this call fails for the first time, we are going to retry it indefinitely // register the retry using the TransformScheduler, when the call eventually succeeds, deregister it before returning - var scheduler = transformServices.getScheduler(); + var scheduler = transformServices.scheduler(); scheduler.registerTransform( params, new TransformRetryableStartUpListener<>( transformId, - l -> transformServices.getConfigManager().getTransformConfiguration(transformId, l), + l -> transformServices.configManager().getTransformConfiguration(transformId, l), ActionListener.runBefore(listener, () -> scheduler.deregisterTransform(transformId)), retryListener(task), () -> true, // because we can't determine if 
this is an unattended transform yet, retry indefinitely @@ -494,11 +494,11 @@ protected AllocatedPersistentTask createTask( parentTaskId, persistentTask.getParams(), (TransformState) persistentTask.getState(), - transformServices.getScheduler(), + transformServices.scheduler(), auditor, threadPool, headers, - transformServices.getTransformNode() + transformServices.transformNode() ); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index c2a526181f90e..07801221adc3b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.DefaultTransformExtension; import org.elasticsearch.xpack.transform.Transform; +import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; @@ -564,7 +565,7 @@ private TransformServices transformServices(TransformConfigManager configManager configManager, mockAuditor ); - return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler, null); + return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler, mock(TransformNode.class)); } private TransformPersistentTasksExecutor buildTaskExecutor(TransformServices transformServices) { diff --git 
a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index e8da9d1887799..80f1c706a34af 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -1058,7 +1058,6 @@ public void testDisableFieldNameField() throws IOException { Request esql = new Request("POST", "_query"); esql.setJsonEntity(""" { - "version": "2024.04.01", "query": "FROM nofnf | LIMIT 1" }"""); // {"columns":[{"name":"dv","type":"keyword"},{"name":"no_dv","type":"keyword"}],"values":[["test",null]]} diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml index e8cd1321db73b..4c0bbfd7ec139 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml @@ -97,7 +97,6 @@ teardown: esql.query: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT tag | LIMIT 10' - version: '2024.04.01' - match: {columns.0.name: "total"} - match: {columns.0.type: "long"} @@ -128,7 +127,6 @@ teardown: gte: "2023-01-02" lte: "2023-01-03" format: "yyyy-MM-dd" - version: '2024.04.01' - match: {columns.0.name: "_index"} - match: {columns.0.type: "keyword"} @@ -200,7 +198,6 @@ teardown: esql.query: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT total DESC | LIMIT 3 | ENRICH suggestions | 
KEEP tag, total, phrase' - version: '2024.04.01' - match: {columns.0.name: "tag"} - match: {columns.0.type: "keyword"} diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index c3a72f3652952..b9b0531fa5b68 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -10,6 +10,7 @@ apply plugin: 'elasticsearch.rest-resources' dependencies { testImplementation testArtifact(project(xpackModule('core'))) testImplementation project(':x-pack:qa') + testImplementation project(':modules:reindex') } restResources { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java new file mode 100644 index 0000000000000..4b39f71dea1a9 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.upgrades; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +public class SecurityIndexRolesMetadataMigrationIT extends AbstractUpgradeTestCase { + + public void testMetadataMigratedAfterUpgrade() throws Exception { + String testRole = "test-role"; + String metaKey = "test_key"; + String metaValue = "test_value"; + + Map testMetadata = Map.of(metaKey, metaValue); + if (CLUSTER_TYPE == ClusterType.OLD) { + createRole(testRole, testMetadata); + assertEntityInSecurityIndex(testRole); + } + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + refreshSecurityIndex(); + waitForMigrationCompletion(); + assertEntityInSecurityIndex(testRole, metaKey, metaValue); + } + } + + public void testMetadataWrittenAfterUpgradeWithoutMigration() throws IOException { + String testRole = "another-test-role"; + String metaKey = "another-test_key"; + String metaValue = "another-test_value"; + + Map testMetadata = Map.of(metaKey, metaValue); + + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + createRole(testRole, testMetadata); + assertEntityInSecurityIndex(testRole, metaKey, metaValue); + } + } + + @SuppressWarnings("unchecked") + private void assertEntityInSecurityIndex(String roleName, String metaKey, String metaValue) throws IOException { + final Request request = new Request("POST", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + request.setJsonEntity( + String.format( + Locale.ROOT, + """ + {"query":{"bool":{"must":[{"term":{"_id":"%s-%s"}},{"term":{"metadata_flattened.%s":"%s"}}]}}}""", + "role", + roleName, + 
metaKey, + metaValue + ) + ); + addExpectWarningOption(options); + request.setOptions(options); + + Response response = adminClient().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + + Map hits = ((Map) responseMap.get("hits")); + assertEquals(1, ((List) hits.get("hits")).size()); + } + + @SuppressWarnings("unchecked") + private void assertEntityInSecurityIndex(String id) throws IOException { + final Request request = new Request("POST", "/.security/_search"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + request.setJsonEntity(String.format(Locale.ROOT, """ + {"query":{"term":{"_id":"%s-%s"}}}""", "role", id)); + addExpectWarningOption(options); + request.setOptions(options); + Response response = adminClient().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + + Map hits = ((Map) responseMap.get("hits")); + assertEquals(1, ((List) hits.get("hits")).size()); + } + + private void addExpectWarningOption(RequestOptions.Builder options) { + Set expectedWarnings = Set.of( + "this request accesses system indices: [.security-7]," + + " but in a future major version, direct access to system indices will be prevented by default" + ); + + options.setWarningsHandler(warnings -> { + final Set actual = Set.copyOf(warnings); + // Return true if the warnings aren't what we expected; the client will treat them as a fatal error. 
+ return actual.equals(expectedWarnings) == false; + }); + } + + @SuppressWarnings("unchecked") + private void waitForMigrationCompletion() throws Exception { + final Request request = new Request("GET", "_cluster/state/metadata/.security-7"); + assertBusy(() -> { + Response response = adminClient().performRequest(request); + assertOK(response); + Map responseMap = responseAsMap(response); + assertTrue( + ((Map) ((Map) ((Map) responseMap.get("metadata")).get("indices")).get( + ".security-7" + )).containsKey("migration_version") + ); + }); + } + + private void createRole(String roleName, Map metadata) throws IOException { + final Request request = new Request("POST", "/_security/role/" + roleName); + BytesReference source = BytesReference.bytes( + jsonBuilder().map( + Map.of( + RoleDescriptor.Fields.CLUSTER.getPreferredName(), + List.of("cluster:monitor/xpack/license/get"), + RoleDescriptor.Fields.METADATA.getPreferredName(), + metadata + ) + ) + ); + request.setJsonEntity(source.utf8ToString()); + assertOK(adminClient().performRequest(request)); + refreshSecurityIndex(); + } + + private void refreshSecurityIndex() throws IOException { + Request request = new Request("POST", "/.security-7/_refresh"); + RequestOptions.Builder options = request.getOptions().toBuilder(); + addExpectWarningOption(options); + request.setOptions(options); + assertOK(adminClient().performRequest(request)); + } +}